java.lang.Class<T> connectorClass
SourceRecordMapper<T> fn
java.lang.Long milisecondsToRun
java.lang.Integer maxRecords
TupleTag<V> responseTag
TupleTag<V> failureTag
org.apache.beam.io.requestresponse.RequestResponseIO.Configuration<RequestT,ResponseT> rrioConfiguration
org.apache.beam.io.requestresponse.Call.Configuration<RequestT,ResponseT> callConfiguration
PCollectionView<T> view
boolean streaming
DataflowPipelineJob job
PCollectionView<T> view
Coder<T> dataCoder
java.util.Map<K,V> properties
PCollectionView<T> view
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
int intTag
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
org.apache.beam.runners.core.metrics.MetricsContainerStepMap metricsContainers
ReferenceCountingExecutableStageContextFactory.Creator creator
SerializableFunction<InputT,OutputT> isReleaseSynchronous
long count
org.apache.beam.runners.core.metrics.DistributionData distributionData
org.apache.beam.runners.core.metrics.GaugeData gaugeData
java.lang.String stepName
java.lang.String metricsKey
java.util.Map<K,V> counters
java.util.Map<K,V> distributions
java.util.Map<K,V> gauges
java.util.Map<K,V> stringSets
com.hazelcast.map.IMap<K,V> accumulator
org.apache.beam.runners.core.metrics.StringSetData stringSetData
int num
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
Duration maxReadTime
int numInitialSplits
long maxNumRecords
int sourceId
double readerCacheInterval
int splitId
BoundedSource<T> source
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
int numPartitions
long bundleSize
java.lang.String stepName
MetricsContainerStepMapAccumulator metricsAccum
MicrobatchSource<T,CheckpointMarkT extends UnboundedSource.CheckpointMark> microbatchSource
org.apache.beam.runners.core.construction.SerializablePipelineOptions options
org.apache.spark.Partitioner partitioner
org.apache.beam.runners.core.metrics.MetricsContainerStepMap value
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.Table<R,C,V> state
java.util.Collection<E> serTimers
org.apache.beam.runners.core.metrics.MetricsContainerStepMap value
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
org.apache.beam.runners.core.StateInternalsFactory<K> stateInternalsFactory
org.apache.beam.runners.core.SystemReduceFn<K,InputT,AccumT,OutputT,W extends BoundedWindow> reduceFn
java.util.function.Supplier<T> options
java.util.Map<K,V> sideInputs
byte[] value
java.util.HashMap<K,V> sideInputDataSets
java.util.Set<E> leaves
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] windowFnBytes
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] wvCoderBytes
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] keyCoderBytes
byte[] wvCoderBytes
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] keyCoderBytes
byte[] wvCoderBytes
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
org.apache.beam.runners.core.StepContext stepcontext
byte[] doFnwithExBytes
byte[] coderBytes
java.util.Map<K,V> outputCodersBytes
byte[] windowBytes
java.util.Map<K,V> sideInputBytes
java.lang.String serializedOptions
java.util.List<E> serializedSideOutputs
java.util.Map<K,V> serializedOutputMap
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] wvCoderBytes
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
org.apache.beam.runners.core.SystemReduceFn<K,InputT,AccumT,OutputT,W extends BoundedWindow> reduceFn
byte[] windowBytes
boolean impulseEmitted
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] keyCoderBytes
byte[] wvCoderBytes
int tag
protected java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
byte[] coderBytes
java.lang.String outputPrefix
SerializableFunction<InputT,OutputT> firstTimeCallback
CannotProvideCoderException.ReasonCode reason
Coder<T> coder
DelegateCoder.CodingFunction<InputT,OutputT> toFn
DelegateCoder.CodingFunction<InputT,OutputT> fromFn
TypeDescriptor<T> typeDescriptor
private void readObject(java.io.ObjectInputStream arg0) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException
java.lang.ClassNotFoundException
private void writeObject(java.io.ObjectOutputStream arg0) throws java.io.IOException
java.io.IOException
java.lang.String decodingException
java.lang.String encodingException
java.lang.String serializationException
java.lang.String deserializationException
java.lang.String exceptionMessage
java.lang.Class<T> type
org.apache.beam.sdk.testing.ExpectedLogs expectedLogs
TestPipeline p
Coder<T> keyCoder
VarIntCoder shardNumberCoder
DelegateCoder<T,IntermediateT> delegateCoder
java.lang.Class<T> clazz
AvroDatumFactory<T> datumFactory
org.apache.beam.sdk.extensions.avro.coders.AvroCoder.SerializableSchemaSupplier schemaSupplier
TypeDescriptor<T> typeDescriptor
java.util.List<E> nonDeterministicReasons
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> decoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> encoder
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> writer
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> reader
java.lang.Class<T> type
AvroIO.TypedWrite<UserT,DestinationT,OutputT> inner
java.lang.Class<T> type
int syncInterval
private java.lang.Object readResolve() throws java.io.ObjectStreamException
java.io.ObjectStreamException
org.apache.beam.sdk.extensions.avro.io.AvroSource.Mode<T> mode
GcsPath gcsPath
java.lang.String bucket
java.lang.String object
java.lang.Class<T> inputClass
ObjectMapper customMapper
InferableFunction<InputT,OutputT> exceptionHandler
java.lang.Class<T> outputClass
ObjectMapper customMapper
InferableFunction<InputT,OutputT> exceptionHandler
java.lang.Object leftNullValue
java.lang.Object rightNullValue
java.lang.Object nullValue
java.lang.Object nullValue
PCollectionView<T> contextSideInput
java.util.List<E> featureList
java.util.List<E> featureList
PCollectionView<T> contextSideInput
java.util.List<E> featureList
java.util.List<E> featureList
java.lang.Class<T> eventTClass
java.lang.Class<T> keyTClass
java.lang.Class<T> stateTClass
java.lang.Class<T> resultTClass
int maxOutputElementsPerBundle
Duration statusUpdateFrequency
boolean produceStatusUpdateOnEveryEvent
EventExaminer<EventT,StateT extends MutableState<EventT,?>> eventExaminer
NullableCoder<T> initialSequenceCoder
VarIntCoder numberOfRangesCoder
VarLongCoder dataCoder
private void readObject(java.io.ObjectInputStream ois) throws java.lang.ClassNotFoundException, java.io.IOException
java.lang.ClassNotFoundException
java.io.IOException
private void writeObject(java.io.ObjectOutputStream oos) throws java.io.IOException
java.io.IOException
java.lang.Class<T> protoMessageClass (the Message type to be coded)
java.util.Set<E> extensionHostClasses (extension host classes for this ProtoCoder; the extensions from these classes will be included in the ExtensionRegistry used during encoding and decoding; a usage sketch follows below)
private void readObject(java.io.ObjectInputStream ois) throws java.io.IOException
java.io.IOException
private void writeObject(java.io.ObjectOutputStream oos) throws java.io.IOException
java.io.IOException
private java.lang.Object readResolve()
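A minimal usage sketch of the extension mechanism noted above, assuming hypothetical generated protobuf classes MyMessage and MyProtoExtensions; ProtoCoder.of and withExtensionsFrom are the public entry points that populate extensionHostClasses.

    import org.apache.beam.sdk.extensions.protobuf.ProtoCoder;

    // Extensions declared in MyProtoExtensions (hypothetical) are added to the
    // ExtensionRegistry this coder uses while encoding and decoding MyMessage.
    ProtoCoder<MyMessage> coder =
        ProtoCoder.of(MyMessage.class)
            .withExtensionsFrom(MyProtoExtensions.class);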
org.apache.beam.sdk.extensions.protobuf.ProtoDynamicMessageSchema.Context<T> context
java.lang.String fullyQualifiedName
java.lang.String expansionService
java.util.List<E> extraPackages
java.util.SortedMap<K,V> kwargsMap
java.util.Map<K,V> typeHints
java.lang.Object[] argsArray
Row providedKwargsRow
java.util.Map<K,V> outputCoders
java.lang.String func
boolean includeIndexes
java.lang.String expansionService
SerializableIr ir
SbeSchema.IrOptions irOptions
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList<E> sbeFields
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException
java.lang.ClassNotFoundException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException
Ir ir
double compression
java.lang.String tempLocation
int memoryMB
ExternalSorter.Options.SorterType sorterType
java.lang.String tempLocation
int memoryMB
ExternalSorter.Options.SorterType sorterType
long memoryMB
BufferedExternalSorter.Options sorterOptions
Combine.CombineFn<InputT,AccumT,OutputT> delegate
int id
java.lang.String name
java.lang.String countryOfResidence
int id
int customerId
boolean unknown
java.lang.Double rowCount
java.lang.Double rate
java.util.List<E> functionPath
java.lang.String jarPath
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
CEPOperator operator
java.util.List<E> operands
java.lang.String alpha
int fieldIndex
Schema.TypeName typeName
java.lang.String outTableName
CEPOperation opr
CEPFieldRef fieldRef
Schema.FieldType fieldType
CEPKind cepKind
java.lang.String patternVar
CEPCall patternCondition
Quantifier quant
int fIndex
boolean dir
boolean nullFirst
java.lang.String repr
org.apache.beam.sdk.extensions.sql.impl.nfa.NFA.State startState
java.util.ArrayList<E> currentRuns
Schema upstreamSchema
BeamRelNode beamRelNode
boolean all
BeamSetOperatorRelBase.OpType opType
java.util.List<E> fieldsIndices
java.util.List<E> orientation
java.util.List<E> nullsFirst
BeamSqlSeekableTable seekableTable
Schema lkpSchema
int factColOffset
Schema joinSubsetType
Schema outputSchema
java.util.List<E> factJoinIdx
java.lang.String leftTag
java.lang.String rightTag
BeamSetOperatorRelBase.OpType opType
boolean all
Combine.CombineFn<InputT,AccumT,OutputT> countFn
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
boolean isSample
SerializableFunction<InputT,OutputT> decimalConverter
java.lang.String delimiter
java.lang.String delimiter
java.util.List<E> indexes
int index
Schema schema
java.lang.String projectId
java.lang.String instanceId
java.lang.String tableId
java.lang.String emulatorHost
boolean useFlatSchema
java.util.Map<K,V> columnsMapping
CSVFormat csvFormat
java.lang.String bootstrapServers
java.util.List<E> topics
java.util.List<E> topicPartitions
java.util.Map<K,V> configUpdates
BeamTableStatistics rowCountStatistics
int numberOfRecordsForRate
PayloadSerializer serializer
java.util.regex.Pattern locationPattern
java.lang.String dbCollection
java.lang.String dbName
java.lang.String dbUri
Schema schema
java.util.List<E> rows
Table table
java.util.List<E> rows
long tableProviderInstanceId
java.util.List<E> timestampedRows
int timestampField
BeamTableStatistics statistics
PTransform<InputT extends PInput,OutputT extends POutput> readConverter
PTransform<InputT extends PInput,OutputT extends POutput> writeConverter
java.lang.String filePattern
BeamTableStatistics rowCountStatistics
FixedWindows bucketWindows
FixedWindows gcWindows
Instant stopTime
long maxGapFillBuckets
SerializableBiFunction<FirstInputT,SecondInputT,OutputT> mergeValues
SerializableFunction<InputT,OutputT> interpolateFunction
TimerSpec gapFillingTimersSpec
TimerSpec gcTimersSpec
StateSpec<StateT extends State> seenBucketsSpec
StateSpec<StateT extends State> gapDurationSpec
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long maxNumRecords
Duration maxReadTime
int numMessagesPerShard
int shardNumber
boolean dedup
boolean throwOnFirstSnapshot
boolean allowSplitting
java.lang.String path
FileBasedSource<T> sourceDelegate
CompressedSource.DecompressingChannelFactory channelFactory
DefaultFilenamePolicy.Params params
ValueProvider<T> baseFilename
java.lang.String shardTemplate
boolean explicitTemplate
java.lang.String suffix
FileBasedSink.DynamicDestinations<UserT,DestinationT,OutputT> dynamicDestinations
FileBasedSink.WritableByteChannelFactory writableByteChannelFactory (used to wrap the raw data output to the underlying channel; the default, Compression.UNCOMPRESSED, does not compress the output)
ValueProvider<T> tempDirectoryProvider
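As a hedged illustration of that uncompressed default, a text sink can opt into compression from user code; the output prefix below is hypothetical, and whether a given sink exposes withCompression or a WritableByteChannelFactory directly varies by IO.

    import org.apache.beam.sdk.io.Compression;
    import org.apache.beam.sdk.io.TextIO;

    // Override the Compression.UNCOMPRESSED default and write gzip-compressed shards.
    TextIO.Write write =
        TextIO.write()
            .to("gs://my-bucket/output/part")   // hypothetical output prefix
            .withCompression(Compression.GZIP);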
FileBasedSink<UserT,DestinationT,OutputT> sink
ValueProvider<T> baseTempDirectory
org.apache.beam.sdk.io.FileBasedSink.WriteOperation.TempSubDirType tempSubdirType
java.util.UUID subdirUUID
boolean windowedWrites
ValueProvider<T> fileOrPatternSpec
EmptyMatchTreatment emptyMatchTreatment
MatchResult.Metadata singleFileMetadata
FileBasedSource.Mode mode
java.util.concurrent.atomic.AtomicReference<V> filesSizeBytes
long startOffset
long endOffset
long minBundleSize
BoundedSource<T> source
UnboundedSource<OutputT,CheckpointMarkT extends UnboundedSource.CheckpointMark> source
long desiredBundleSizeBytes
SerializableFunction<InputT,OutputT> createSource
Coder<T> coder
ReadAllViaFileBasedSource.ReadFileRangesFnExceptionHandler exceptionHandler
boolean usesReshuffle
SerializableFunction<InputT,OutputT> createSource
ReadAllViaFileBasedSource.ReadFileRangesFnExceptionHandler exceptionHandler
long desiredBundleSizeBytes
TextIO.TypedWrite<UserT,DestinationT> inner
byte[] delimiter
int skipHeaderLines
FileBasedSink.WriteOperation<DestinationT,OutputT> writeOperation
java.lang.String accessKey
java.lang.String secretKey
Regions region
java.lang.String serviceEndpoint
WatermarkPolicyFactory.CustomWatermarkPolicy watermarkPolicy
WatermarkParameters watermarkParameters
CosmosIO.Read<T> spec
NormalizedRange range
java.lang.Long estimatedByteSize
java.lang.String minInclusive
java.lang.String maxExclusive
java.math.BigInteger start
java.math.BigInteger end
CsvWriteTransformProvider.CsvWriteConfiguration configuration
int backendVersion
ElasticsearchIO.Read spec
java.lang.Integer numSlices
java.lang.Integer sliceId
java.lang.Long estimatedByteSize
ElasticsearchIO.DocToBulk docToBulk
ElasticsearchIO.BulkIO bulkIO
BigQueryServices testBigQueryServices (BigQueryServices used for testing)
BigQueryExportReadSchemaTransformConfiguration configuration
BigQueryIO.TypedRead<T> inner
private void readObject(java.io.ObjectInputStream in) throws java.lang.ClassNotFoundException, java.io.IOException
java.lang.ClassNotFoundException
java.io.IOException
ValueProvider<T> tableReferenceProvider
boolean projectionPushdownApplied
BigQueryIO.Write.CreateDisposition createDisposition
BigQueryServices bqServices
DynamicDestinations<T,DestinationT> dynamicDestinations
java.lang.String kmsKey
DynamicDestinations<T,DestinationT> dynamicDestinations
SerializableFunction<InputT,OutputT> formatFunction
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
BigQueryServices bqServices
TupleTag<V> failedWritesTag
TupleTag<V> successfulWritesTag
Coder<T> errorCoder
Coder<T> successCoder
SerializableFunction<InputT,OutputT> rowMutationFn
BadRecordRouter badRecordRouter
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
org.apache.beam.sdk.io.gcp.bigquery.TwoLevelMessageConverterCache<DestinationT,ElementT> messageConverters
BigQueryServices bqServices
TupleTag<V> failedWritesTag
TupleTag<V> successfulWritesTag
SerializableFunction<InputT,OutputT> rowMutationFn
BadRecordRouter badRecordRouter
Coder<T> elementCoder
SerializableFunction<InputT,OutputT> formatFunction
SerializableFunction<InputT,OutputT> formatRecordOnFailureFunction
boolean usesCdc
BigQueryIO.Write.CreateDisposition createDisposition
boolean ignoreUnknownValues
boolean autoSchemaUpdates
BigQueryServices bqServices
Counter flushOperationsSent
Counter flushOperationsSucceeded
Counter flushOperationsFailed
Counter flushOperationsAlreadyExists
Counter flushOperationsInvalidArgument
Distribution flushLatencyDistribution
Counter finalizeOperationsSent
Counter finalizeOperationsSucceeded
Counter finalizeOperationsFailed
TupleTag<V> successfulConvertedRowsTag
TupleTag<V> failedRowsTag
TupleTag<V> successfulWrittenRowsTag
java.util.function.Predicate<T> successfulRowsPredicate
Coder<T> destinationCoder
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
SerializableFunction<InputT,OutputT> rowUpdateFn
BigQueryIO.Write.CreateDisposition createDisposition
java.lang.String kmsKey
Duration triggeringFrequency
BigQueryServices bqServices
int numShards
boolean allowInconsistentWrites
boolean allowAutosharding
boolean autoUpdateSchema
boolean ignoreUnknownValues
boolean usesCdc
com.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation defaultMissingValueInterpretation
BadRecordRouter badRecordRouter
ErrorHandler<ErrorT,OutputT extends POutput> badRecordErrorHandler
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
BigQueryServices bqServices
TupleTag<V> failedRowsTag
TupleTag<V> successfulRowsTag
java.util.function.Predicate<T> successfulRowsPredicate
TupleTag<V> finalizeTag
Coder<T> failedRowsCoder
Coder<T> successfulRowsCoder
boolean autoUpdateSchema
boolean ignoreUnknownValues
BigQueryIO.Write.CreateDisposition createDisposition
java.lang.String kmsKey
boolean usesCdc
com.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation defaultMissingValueInterpretation
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
BigQueryIO.Write.CreateDisposition createDisposition
java.lang.String kmsKey
BigQueryServices bqServices
Coder<T> destinationCoder
Coder<T> failedRowsCoder
boolean autoUpdateSchema
boolean ignoreUnknownValues
com.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation defaultMissingValueInterpretation
Duration streamIdleTime
TupleTag<V> failedRowsTag
TupleTag<V> successfulRowsTag
java.util.function.Predicate<T> successfulRowsPredicate
Coder<T> succussfulRowsCoder
TupleTag<V> flushTag
org.apache.beam.sdk.io.gcp.bigquery.StorageApiDynamicDestinations<T,DestinationT> dynamicDestinations
BigQueryServices bqServices
TupleTag<V> failedRowsTag
TupleTag<V> successfulRowsTag
java.util.function.Predicate<T> successfulRowsPredicate
TupleTag<V> finalizeTag
Coder<T> failedRowsCoder
Coder<T> successfulRowsCoder
boolean autoUpdateSchema
boolean ignoreUnknownValues
BigQueryIO.Write.CreateDisposition createDisposition
java.lang.String kmsKey
boolean usesCdc
com.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation defaultMissingValueInterpretation
BigQueryServices bigQueryServices
BigQueryIO.Write.CreateDisposition createDisposition
DynamicDestinations<T,DestinationT> dynamicDestinations
InsertRetryPolicy retryPolicy
boolean extendedErrorInfo
boolean skipInvalidRows
boolean ignoreUnknownValues
boolean ignoreInsertIds
boolean autoSharding
boolean propagateSuccessful
java.lang.String kmsKey
Coder<T> elementCoder
SerializableFunction<InputT,OutputT> toTableRow
SerializableFunction<InputT,OutputT> toFailsafeTableRow
SerializableFunction<InputT,OutputT> deterministicRecordIdFn
BigQueryServices bigQueryServices
InsertRetryPolicy retryPolicy
boolean extendedErrorInfo
boolean skipInvalidRows
boolean ignoreUnknownValues
boolean ignoreInsertIds
boolean autoSharding
boolean propagateSuccessful
Coder<T> elementCoder
SerializableFunction<InputT,OutputT> toTableRow
SerializableFunction<InputT,OutputT> toFailsafeTableRow
SerializableFunction<InputT,OutputT> deterministicRecordIdFn
java.lang.String tableSpec
java.lang.String tableDescription
java.lang.String jsonTimePartitioning
java.lang.String jsonClustering
BigQueryServices bqServices
PCollectionView<T> zeroLoadJobIdPrefixView
ValueProvider<T> loadJobProjectId
int maxRetryJobs
java.lang.String kmsKey
BigQueryServices.JobService jobService
java.util.Set<E> schemaUpdateOptions
BigQueryIO.Write.WriteDisposition writeDisposition
BigQueryIO.Write.CreateDisposition createDisposition
DynamicDestinations<T,DestinationT> dynamicDestinations
java.util.Map<K,V> pendingJobs
org.apache.beam.sdk.io.gcp.bigquery.BigQueryHelpers.PendingJob retryJob
TableDestination tableDestination
java.util.List<E> tempTables
BoundedWindow window
BigQueryServices bqServices
BigQueryServices bqServices
PCollectionView<T> jobIdToken
BigQueryIO.Write.WriteDisposition firstPaneWriteDisposition
BigQueryIO.Write.CreateDisposition firstPaneCreateDisposition
int maxRetryJobs
java.lang.String kmsKey
ValueProvider<T> loadJobProjectId
java.util.List<E> pendingJobs
BigQueryServices testBigQueryServices
BigQueryDirectReadSchemaTransformProvider.BigQueryDirectReadSchemaTransformConfiguration configuration
BigQueryServices testBigQueryServices (BigQueryServices used for testing)
BigQueryWriteConfiguration configuration
BigQueryServices testBigQueryServices
BigQueryWriteConfiguration configuration
BigQueryWriteConfiguration configuration
org.apache.beam.sdk.util.RowStringInterpolator interpolator
java.util.List<E> primaryKey
org.apache.beam.sdk.util.RowFilter rowFilter
java.util.Map<K,V> columnFamilyMapping
java.util.Map<K,V> columnFamilyMapping
org.apache.beam.sdk.io.gcp.bigtable.CellValueParser cellValueParser
BigtableConfig bigtableConfig
org.apache.beam.sdk.io.gcp.bigtable.BigtableWriteOptions bigtableWriteOptions
org.apache.beam.sdk.io.gcp.bigtable.BigtableServiceFactory factory
ErrorHandler<ErrorT,OutputT extends POutput> badRecordErrorHandler
BadRecordRouter badRecordRouter
Schema schema
Schema schema
java.util.Map<K,V> columnsMapping
BigtableConfig changeStreamConfig
BigtableConfig metadataTableConfig
java.lang.String tableId
java.lang.String metadataTableId
java.lang.String changeStreamName
Instant endTime
DaoFactory daoFactory
ChangeStreamMetrics metrics
ActionFactory actionFactory
DetectNewPartitionsAction detectNewPartitionsAction
DaoFactory daoFactory
Instant startTime
BigtableIO.ExistingPipelineOptions existingPipelineOptions
DaoFactory daoFactory
ChangeStreamMetrics metrics
ActionFactory actionFactory
Duration backlogReplicationAdjustment
SizeEstimator<T> sizeEstimator
ReadChangeStreamPartitionAction readChangeStreamPartitionAction
org.apache.beam.sdk.util.SerializableSupplier<T> clock
Instant startTime
boolean resume
com.google.cloud.bigtable.data.v2.models.Range.ByteStringRange partition
Instant startTime
java.util.List<E> changeStreamContinuationTokens
Instant endTime
java.lang.String uuid
Instant parentLowWatermark
java.util.List<E> parentPartitions
com.google.cloud.bigtable.data.v2.models.Range.ByteStringRange partition
Instant watermark
com.google.cloud.bigtable.data.v2.models.ChangeStreamContinuationToken currentToken
Instant estimatedLowWatermark
java.math.BigDecimal throughputEstimate
Instant lastRunTimestamp
com.google.cloud.bigtable.data.v2.models.CloseStream closeStream
boolean failToLock
boolean isHeartbeat
Schema dataSchema
java.lang.String location
java.lang.String kind
java.lang.String projectId
java.lang.String keyField
Schema schema
java.lang.String keyField
ValueProvider<T> numWorkers
PCollectionView<T> firstInstantSideInput
Counter throttlingMsecs
java.util.function.Supplier<T> keySupplier
java.lang.String kind
java.lang.String keyField
java.util.List<E> writeFailures
boolean nameOnlyQuery
com.google.firestore.v1.Write write
com.google.firestore.v1.WriteResult writeResult
com.google.rpc.Status status
int numWrites
long numBytes
int maxAttempts
Duration initialBackoff
Duration samplePeriod
Duration samplePeriodBucketSize
double overloadRatio
Duration throttleDuration
int batchInitialCount
int batchMaxCount
long batchMaxBytes
Duration batchTargetLatency
int hintMaxNumWorkers
boolean shouldReportDiagnosticMetrics
ValueProvider<T> sourceFhirStore
ValueProvider<T> destinationFhirStore
ValueProvider<T> deidConfig
HealthcareApiClient client
ValueProvider<T> destinationFhirStore
java.lang.String deidConfigJson
ValueProvider<T> fhirStore
ValueProvider<T> fhirStore
ValueProvider<T> exportUri
HealthcareApiClient client
ValueProvider<T> exportUri
ValueProvider<T> fhirStore
ValueProvider<T> deadLetterGcsPath
ValueProvider<T> tempGcsPath
FhirIO.Import.ContentStructure contentStructure
ValueProvider<T> fhirStore
NullableCoder<T> originalCoder
org.apache.beam.sdk.io.gcp.healthcare.HL7v2IO.HL7v2MessageClient client
ValueProvider<T> hl7v2Stores
ValueProvider<T> filter
Duration initialSplitDuration
org.apache.beam.sdk.io.gcp.healthcare.HL7v2IO.HL7v2MessageClient client
int statusCode
int maxPublishBatchSize
SerializableFunction<InputT,OutputT> formatFunction
SerializableFunction<InputT,OutputT> topicFunction
BadRecordRouter badRecordRouter
Coder<T> inputCoder
TupleTag<V> outputTag
java.lang.String projectId
java.lang.String projectId
java.lang.String schemaId
java.lang.String projectId
java.lang.String subscriptionName
java.lang.String path
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubSubscription.Type type
java.lang.String project
java.lang.String subscription
org.apache.beam.sdk.io.gcp.pubsub.PubsubIO.PubsubTopic.Type type
java.lang.String project
java.lang.String topic
int maxPublishBatchByteSize
int maxPublishBatchSize
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> topic
java.lang.String timestampAttribute
java.lang.String idAttribute
int numShards
int publishBatchSize
int publishBatchBytes
Duration maxLatency
org.apache.beam.sdk.io.gcp.pubsub.PubsubUnboundedSink.RecordIdMethod recordIdMethod (only relevant if PubsubUnboundedSink.idAttribute is non-null)
java.lang.String pubsubRootUrl
com.google.api.client.util.Clock clock
PubsubClient.PubsubClientFactory pubsubFactory
ValueProvider<T> project (relevant if only PubsubUnboundedSource.topic was given)
ValueProvider<T> topic (if null, PubsubUnboundedSource.subscription must be given; otherwise PubsubUnboundedSource.subscription must be null)
ValueProvider<T> subscription (if null, PubsubUnboundedSource.topic must be given; otherwise PubsubUnboundedSource.topic must be null; if no subscription is given, a random one is created when the transform is applied, this field is updated with that subscription's path, and the created subscription is never deleted)
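A short sketch, assuming hypothetical resource names, of how that topic/subscription constraint surfaces in the public PubsubIO API: exactly one of the two is set, and reading from a topic causes a subscription to be created when the transform is applied.

    import org.apache.beam.sdk.io.gcp.pubsub.PubsubIO;

    // Read from an existing subscription (the topic must then be left unset)...
    PubsubIO.Read<String> fromSubscription =
        PubsubIO.readStrings()
            .fromSubscription("projects/my-project/subscriptions/my-sub");

    // ...or read from a topic (the subscription must then be left unset); a
    // subscription is created for the pipeline when the transform is applied.
    PubsubIO.Read<String> fromTopic =
        PubsubIO.readStrings()
            .fromTopic("projects/my-project/topics/my-topic");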
java.lang.String timestampAttribute
java.lang.String idAttribute
boolean needsAttributes
boolean needsMessageId
boolean needsOrderingKey
SerializableFunction<InputT,OutputT> valueMapper
java.util.Set<E> attributes
java.lang.String attributesMap
Schema payloadSchema
Schema errorSchema
boolean useErrorOutput
SerializableFunction<InputT,OutputT> valueMapper
Counter errorCounter
java.lang.Long errorsInBundle
boolean handleErrors
java.util.List<E> attributes
java.lang.String attributeMap
Schema errorSchema
Schema attributeSchema
java.lang.String attributeId
java.lang.String attributeId
SerializableFunction<InputT,OutputT> newInstance
java.util.Map<K,V> instances
org.apache.beam.sdk.io.gcp.pubsublite.internal.ProtoFromBytes.ProtoParser<T2> parser
PublisherOptions options
SubscriberOptions options
SubscriberOptions subscriberOptions
org.apache.beam.sdk.io.gcp.pubsublite.internal.UnboundedSourceImpl.SubscriberFactory subscriberFactory
org.apache.beam.sdk.io.gcp.pubsublite.internal.UnboundedSourceImpl.BacklogReaderFactory readerFactory
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Optional<T> partition
UuidDeduplicationOptions options
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList<E> mutations
SpannerConfig config
PCollectionView<T> dialectView
java.util.Set<E> allowedTableNames
SpannerIO.Write spec
java.util.Set<E> enabledMetrics (ChangeStreamMetrics default constructor)
SpannerConfig changeStreamSpannerConfig
SpannerConfig metadataSpannerConfig
java.lang.String changeStreamName
PartitionMetadataTableNames partitionMetadataTableNames
com.google.cloud.spanner.Options.RpcPriority rpcPriority
java.lang.String jobName
com.google.cloud.spanner.Dialect spannerChangeStreamDatabaseDialect
com.google.cloud.spanner.Dialect metadataDatabaseDialect
java.lang.String tableName
java.lang.String watermarkIndexName
java.lang.String createdAtIndexName
DaoFactory daoFactory
Duration resumeDuration
DaoFactory daoFactory
MapperFactory mapperFactory
ActionFactory actionFactory
ChangeStreamMetrics metrics
long averagePartitionBytesSize
boolean averagePartitionBytesSizeSet
DaoFactory daoFactory
MapperFactory mapperFactory
com.google.cloud.Timestamp startTimestamp
com.google.cloud.Timestamp endTimestamp
ChangeStreamMetrics metrics
DaoFactory daoFactory
MapperFactory mapperFactory
ActionFactory actionFactory
ChangeStreamMetrics metrics
ThroughputEstimator<T> throughputEstimator (set through the ReadChangeStreamPartitionDoFn.setThroughputEstimator(BytesThroughputEstimator) call)
java.util.Deque<E> deque
int windowSizeSeconds
SizeEstimator<T> sizeEstimator
com.google.cloud.spanner.Dialect spannerChangeStreamDatabaseDialect
java.lang.String partitionToken
com.google.cloud.Timestamp recordTimestamp
com.google.cloud.Timestamp partitionStartTimestamp
com.google.cloud.Timestamp partitionEndTimestamp
com.google.cloud.Timestamp partitionCreatedAt
com.google.cloud.Timestamp partitionScheduledAt
com.google.cloud.Timestamp partitionRunningAt
com.google.cloud.Timestamp queryStartedAt
com.google.cloud.Timestamp recordStreamStartedAt
com.google.cloud.Timestamp recordStreamEndedAt
com.google.cloud.Timestamp recordReadAt
long totalStreamTimeMillis
long numberOfRecordsRead
java.lang.String token
java.util.HashSet<E> parentTokens
com.google.cloud.Timestamp startTimestamp
java.lang.String recordSequence
java.util.List<E> childPartitions
ChangeStreamRecordMetadata metadata
java.lang.String name
TypeCode type
boolean isPrimaryKey
long ordinalPosition
java.lang.String partitionToken
com.google.cloud.Timestamp commitTimestamp
java.lang.String serverTransactionId
boolean isLastRecordInTransactionInPartition
java.lang.String recordSequence
java.lang.String tableName
java.util.List<E> rowType
java.util.List<E> mods
ModType modType
ValueCaptureType valueCaptureType
long numberOfRecordsInTransaction
long numberOfPartitionsInTransaction
java.lang.String transactionTag
boolean isSystemTransaction
ChangeStreamRecordMetadata metadata
com.google.cloud.Timestamp timestamp
ChangeStreamRecordMetadata metadata
java.lang.String keysJson
java.lang.String oldValuesJson
java.lang.String newValuesJson
java.lang.String partitionToken
java.util.HashSet<E> parentTokens
com.google.cloud.Timestamp startTimestamp
com.google.cloud.Timestamp endTimestamp
long heartbeatMillis
PartitionMetadata.State state
com.google.cloud.Timestamp watermark
com.google.cloud.Timestamp createdAt
com.google.cloud.Timestamp scheduledAt
com.google.cloud.Timestamp runningAt
com.google.cloud.Timestamp finishedAt
java.lang.String code
com.google.cloud.Timestamp from
com.google.cloud.Timestamp to
java.lang.String expectedChecksum
java.lang.String actualChecksum
BigqueryClient bigqueryClient
BigQueryServices.JobService jobService
FakeDatasetService datasetService
BigQueryServices.StorageClient storageClient
java.util.List<E> items
java.util.function.Function<T,R> shouldFailRow
java.util.Map<K,V> insertErrors
int numFailuresExpected
int numFailures
FakeDatasetService datasetService
public void readExternal(java.io.ObjectInput in) throws java.io.IOException, java.lang.ClassNotFoundException
java.io.IOException
java.lang.ClassNotFoundException
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException
java.io.IOException
java.lang.Class<T> type
SerializableConfiguration conf
Coder<T> keyCoder
Coder<T> valueCoder
SimpleFunction<InputT,OutputT> keyTranslationFunction
SimpleFunction<InputT,OutputT> valueTranslationFunction
HadoopFormatIO.SerializableSplit inputSplit
boolean skipKeyClone
boolean skipValueClone
long boundedSourceEstimatedSize
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException
java.io.IOException
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException
java.io.IOException
InputSplit inputSplit
PTransform<InputT extends PInput,OutputT extends POutput> configTransform
ExternalSynchronization externalSynchronization
boolean withPartitioning
java.lang.String locksDir
org.apache.beam.sdk.io.hadoop.format.HDFSSynchronization.ThrowingFunction<T1,T2,X extends java.lang.Exception> fileSystemFactory
private java.lang.Object writeReplace()
Configuration configuration
java.lang.String tableId
org.apache.hadoop.hbase.client.Scan scan
private java.lang.Object writeReplace()
Configuration configuration
java.lang.String tableId
private java.lang.Object writeReplace()
Configuration configuration
java.lang.String tableId
JdbcIO.DataSourceConfiguration config
JdbcIO.DataSourceProviderFromDataSourceConfiguration config
JdbcIO.WriteVoid<T> inner
Schema schema
java.util.List<E> fieldExtractors
java.lang.String jmsMessageID
long jmsTimestamp
java.lang.String jmsCorrelationID
javax.jms.Destination jmsReplyTo
javax.jms.Destination jmsDestination
int jmsDeliveryMode
boolean jmsRedelivered
java.lang.String jmsType
long jmsExpiration
int jmsPriority
java.util.Map<K,V> properties
java.lang.String text
JsonWriteTransformProvider.JsonWriteConfiguration configuration
SerializableFunction<InputT,OutputT> serializableFunction
SerializableFunction<InputT,OutputT> schemaRegistryClientProviderFn
java.lang.String schemaRegistryUrl
java.lang.String subject
java.lang.Integer version
java.lang.String topic
int partition
long nextOffset
long watermarkMillis
KafkaIO.ReadSourceDescriptors<K,V> readSourceDescriptors
boolean use259implementation
KafkaIO.Read<K,V> read
SerializableFunction<InputT,OutputT> valueMapper
Counter errorCounter
java.lang.Long errorsInBundle
boolean handleErrors
Schema errorSchema
TopicPartition topicPartition
SerializableFunction<InputT,OutputT> toBytesFn
Counter errorCounter
java.lang.Long errorsInBundle
boolean handleErrors
Schema errorSchema
WatermarkPolicyFactory.CustomWatermarkPolicy watermarkPolicy
WatermarkParameters watermarkParameters
MongoDbGridFSIO.Read<T> spec
java.util.List<E> objectIds
Neo4jIO.DriverConfiguration config
SerializableFunction<InputT,OutputT> pulsarClientSerializableFunction
org.apache.pulsar.client.api.PulsarClient client
org.apache.pulsar.client.admin.PulsarAdmin admin
java.lang.String clientUrl
java.lang.String adminUrl
SerializableFunction<InputT,OutputT> extractOutputTimestampFn
org.apache.pulsar.client.api.Producer<T> producer
org.apache.pulsar.client.api.PulsarClient client
java.lang.String clientUrl
java.lang.String topic
java.lang.String routingKey
byte[] body
java.lang.String contentType
java.lang.String contentEncoding
java.util.Map<K,V> headers
java.lang.Integer deliveryMode
java.lang.Integer priority
java.lang.String correlationId
java.lang.String replyTo
java.lang.String expiration
java.lang.String messageId
java.util.Date timestamp
java.lang.String type
java.lang.String userId
java.lang.String appId
java.lang.String clusterId
org.apache.beam.vendor.grpc.v1p60p1.com.google.protobuf.ByteString value
long from
long to
java.lang.String cursor
long dbSize
boolean isStart
SnowflakeIO.DataSourceConfiguration config
ValueProvider<T> stagingBucketDir
java.lang.String tmpDirName
ValueProvider<T> quoteChar
SnowflakeDataType dataType
java.lang.String name
boolean isNullable
SnowflakeColumn[] columns
int precision
int scale
java.lang.Long size
java.lang.Long length
org.apache.beam.sdk.io.solace.SolaceIO.Read.Configuration.Builder<T> configurationBuilder
java.util.Queue<E> publishedResultsQueue
RetryCallableManager retryCallableManager
ObjectMapper objectMapper
org.apache.beam.sdk.io.solace.broker.SempBasicAuthClientExecutor sempBasicAuthClientExecutor
Queue queue (set in the SolaceIO.Read.expand(org.apache.beam.sdk.values.PBegin) method; this could be used to associate the created SessionService with a specific queue for message handling)
SolaceIO.SubmissionMode submissionMode
Queue queue
java.lang.Integer maxNumConnections
Coder<T> coder
boolean enableDeduplication
SempClientFactory sempClientFactory
SessionServiceFactory sessionServiceFactory
SerializableFunction<InputT,OutputT> timestampFn
Duration watermarkIdleDurationThreshold
SerializableFunction<InputT,OutputT> parseFn
int shardCount
int shardKey
Distribution latencyPublish
Distribution latencyErrors
SerializableFunction<InputT,OutputT> destinationFn
SessionServiceFactory sessionServiceFactory
DeliveryMode deliveryMode
SolaceIO.SubmissionMode submissionMode
int producersMapCardinality
boolean publishLatencyMetrics
int currentBundleProducerIndex
java.util.List<E> batchToEmit
Instant bundleTimestamp
java.util.UUID writerTransformUuid
java.lang.Class<T> sparkReceiverClass
java.lang.Object[] constructorArgs
java.lang.Class<T> type
org.apache.thrift.protocol.TProtocolFactory protocolFactory
java.util.Map<K,V> typedefs
java.lang.String fileLocation
java.lang.String content
org.apache.tika.metadata.Metadata metadata
java.lang.String[] metadataNames
org.apache.beam.sdk.util.SerializableThrowable error
java.lang.Class<T> jaxbClass
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbMarshaller
org.apache.beam.sdk.util.EmptyOnDeserializationThreadLocal<T> jaxbUnmarshaller
org.apache.beam.sdk.io.xml.XmlIO.MappingConfiguration<T> configuration
MetricName name
boolean processWideContainer
boolean perWorkerCounter
MetricName name
boolean processWideContainer
MetricName name
org.apache.beam.sdk.util.HistogramData.BucketType bucketType
boolean processWideContainer
boolean perWorkerHistogram
int start
int end
ValueProvider<T> value
SerializableFunction<InputT,OutputT> translator
java.lang.Class<T> klass
java.lang.String methodName
java.lang.String propertyName
java.lang.Object defaultValue
java.lang.Long optionsId
java.lang.Object value
GenerateSequenceSchemaTransformProvider.GenerateSequenceConfiguration configuration
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.BiMap<K,V> fieldIndices
java.util.Map<K,V> encodingPositions
boolean encodingPositionsOverridden
java.util.List<E> fields
int hashCode
java.util.UUID uuid
Schema.Options options
java.util.Map<K,V> options
Schema schema
TypeDescriptor<T> typeDescriptor
SerializableFunction<InputT,OutputT> toRowFunction
SerializableFunction<InputT,OutputT> fromRowFunction
java.util.Map<K,V> cachedProviders
SimpleFunction<InputT,OutputT> transform
PTransform<InputT extends PInput,OutputT extends POutput> deadLetter
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.BiMap<K,V> enumValues
java.util.List<E> values
int value
java.lang.String name
int byteArrayLength
int precision
int scale
java.lang.String name
int stringLength
Schema schema
Schema oneOfSchema
EnumerationType enumerationType
byte[] schemaProtoRepresentation
java.lang.String identifier
Schema.FieldType argumentType
java.lang.Object argument
Schema.FieldType fieldType
byte[] payload
java.lang.String name
int maxByteArrayLength
java.lang.String name
int maxStringLength
java.util.List<E> newFields
org.apache.beam.sdk.schemas.transforms.Cast.Narrowing.Fold fold
org.apache.beam.sdk.schemas.transforms.Cast.Widening.Fold fold
org.apache.beam.sdk.schemas.transforms.CoGroup.JoinArguments joinArgs
org.apache.beam.sdk.schemas.transforms.CoGroup.JoinArguments joinArgs
java.lang.String keyFieldName
FieldAccessDescriptor fieldsToDrop
java.util.List<E> filters
org.apache.beam.sdk.schemas.transforms.SchemaAggregateFn.Inner schemaAggregateFn
int fanout
Combine.CombineFn<InputT,AccumT,OutputT> combineFn
int fanout
FieldAccessDescriptor lhs
FieldAccessDescriptor rhs
org.apache.beam.sdk.schemas.transforms.Join.JoinType joinType
Join.FieldsEqual.Impl predicate
java.util.List<E> renames
org.apache.beam.sdk.schemas.transforms.TypedSchemaTransformProviderTest.Configuration config
FieldAccessDescriptor fieldAccessDescriptor
JavaExplodeTransformProvider.Configuration configuration
JavaFilterTransformProvider.Configuration configuration
JavaMapToFieldsTransformProvider.Configuration configuration
JavaRowUdf.Configuration config
Schema inputSchema
Schema.FieldType outputType
LoggingTransformProvider.Configuration configuration
javax.tools.DiagnosticCollector<S> diagnostics
java.util.Map<K,V> delegateMap
SchemaCoder<T> outputSchemaCoder
Schema.FieldType unboxedType
Schema inputSchema
FieldAccessDescriptor fieldAccessDescriptor
boolean optimized
java.lang.String expectedChecksum
java.lang.String actualChecksum
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableFunction<InputT,OutputT> checkerFn
org.apache.beam.sdk.testing.PAssert.AssertionWindows rewindowingStrategy
SimpleFunction<InputT,OutputT> paneExtractor
PAssert.PAssertionSite site
SerializableMatcher<T> matcher
SerializableFunction<InputT,OutputT> checkerFn
PAssert.PAssertionSite site
java.lang.String message
java.lang.StackTraceElement[] creationStackTrace
boolean isSuccess
PAssert.PAssertionSite site
org.apache.beam.sdk.util.SerializableThrowable throwable
java.util.UUID uuid
TimestampedValue.TimestampedValueCoder<T> elementCoder
java.util.Comparator<T> compareFn
int numQuantiles
int bufferSize
int numBuffers
long maxNumElements
java.util.TreeSet<E> heap
long minHash
long sampleSize
long sampleSize
java.lang.Double maximumEstimationError
long sampleSize
java.lang.Double maximumEstimationError
int sampleSize
int elementCount
int uniqueCount
int sampleSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean insertDefault
int fanout
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
java.util.List<E> sideInputs
SerializableFunction<InputT,OutputT> combiner
int bufferSize
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
boolean fewKeys
java.util.List<E> sideInputs
CombineFnBase.GlobalCombineFn<InputT,AccumT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
SerializableFunction<InputT,OutputT> hotKeyFanout
boolean fewKeys
java.util.List<E> sideInputs
java.util.Map<K,V> valuesMap
java.util.List<E> combineFns
java.util.List<E> combineInputCoders
java.util.List<E> extractInputFns
java.util.List<E> outputTags
int combineFnCount
java.util.List<E> extractInputFns
java.util.List<E> combineInputCoders
java.util.List<E> combineFnWithContexts
java.util.List<E> outputTags
int combineFnCount
PCollectionView<T> view
long sum
long inputs
long merges
long outputs
java.lang.Object closure
Requirements requirements
ValueProvider<T> provider
Coder<T> coder
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.base.Optional<T> windowCoder
TimeDomain timeDomain
Duration duration
TimeDomain timeDomain
Duration duration
SerializableFunction<InputT,OutputT> fn
TypeDescriptor<T> type
Coder<T> coder
TimeDomain timeDomain
Duration duration
SerializableFunction<InputT,OutputT> fn
TypeDescriptor<T> representativeType
ProcessFunction<InputT,OutputT> predicate
java.lang.String predicateDescription
TestPipeline p
java.lang.Object fn
java.lang.Object fn
ProcessFunction<InputT,OutputT> exceptionHandler
boolean fewKeys
GroupIntoBatches.BatchingParams<InputT> params
ProcessFunction<InputT,OutputT> fn
ExpectedException thrown
java.lang.Object fn
java.lang.Object fn
ProcessFunction<InputT,OutputT> exceptionHandler
Contextful<ClosureT> fn
Contextful<ClosureT> fn
java.util.Map<K,V> sideInputs
TupleTag<V> mainOutputTag
TupleTagList additionalOutputTags
DisplayData.ItemSpec<T> fnDisplayData
DoFn<InputT,OutputT> fn
java.util.Map<K,V> sideInputs
DoFn<InputT,OutputT> fn
DisplayData.ItemSpec<T> fnDisplayData
PCollectionView<T> view
private void readObject(java.io.ObjectInputStream oos)
private void writeObject(java.io.ObjectOutputStream oos)
java.lang.Integer numBuckets
boolean allowDuplicates
boolean allowDuplicates
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
int group
java.util.regex.Pattern pattern
int keyGroup
int valueGroup
java.util.regex.Pattern pattern
java.lang.String groupName
java.util.regex.Pattern pattern
java.lang.String keyGroupName
java.lang.String valueGroupName
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
java.lang.String replacement
java.util.regex.Pattern pattern
boolean outputEmpty
int splitLimit
java.util.Collection<E> sideInputs
int shard
java.lang.Integer numBuckets
java.lang.Integer numBuckets
int sampleSize
Top.TopCombineFn<T,ComparatorT extends java.util.Comparator<T> & java.io.Serializable> topCombineFn
java.util.Random rand
java.lang.Object[] expectedValues
int expectedSize
SerializableFunction<InputT,OutputT> fn
java.util.UUID uuid
PTransform<InputT extends PInput,OutputT extends POutput> consumer
int count
java.util.Comparator<T> compareFn
java.lang.Boolean withRandomAccess
boolean inMemory
boolean inMemory
boolean inMemory
java.lang.Object defaultValue
boolean hasDefault
PCollectionView<T> view
java.util.Map<K,V> windowsToOffsets
SerializableFunction<InputT,OutputT> fn
SerializableFunction<InputT,OutputT> fn
Duration allowedTimestampSkew
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableMap<K,V> entries
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.collect.ImmutableList<E> components
ErrorHandler<ErrorT,OutputT extends POutput> errorHandler
BadRecordRouter badRecordRouter
BadRecordRouter badRecordRouter
private void readObject(java.io.ObjectInputStream aInputStream) throws java.lang.ClassNotFoundException, java.io.IOException
java.lang.ClassNotFoundException
java.io.IOException
Counter errorCounter
Counter errorCounter
CoGbkResultSchema schema
UnionCoder unionCoder
TupleTagList tupleTagList
java.util.HashMap<K,V> tagMap
java.util.List<E> elementCoders
ExpectedException thrown
int countElems
java.util.List<E> timestampTransforms
Trigger.OnceTrigger earlyTrigger
Trigger.OnceTrigger lateTrigger
int number
DateTime startDate
DateTimeZone timeZone
int number
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
int number
int monthOfYear
int dayOfMonth
DateTime startDate
DateTimeZone timeZone
WindowFn<T,W extends BoundedWindow> givenWindowFn
java.lang.String reason
Trigger repeatedTrigger
Duration gapDuration
java.util.List<E> subTriggers
Window<T> original
WindowingStrategy<T,W extends BoundedWindow> updatedStrategy
Duration maximumLookback
java.lang.Throwable throwable
java.lang.Object key
java.lang.Object value
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> keyTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> valueTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> keyTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> valueTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> keyTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> valueTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> keyTypeDescriptorSupplier
PCollectionViews.TypeDescriptorSupplier<T> valueTypeDescriptorSupplier
TupleTag<V> tag
WindowMappingFn<TargetWindowT extends BoundedWindow> windowMappingFn
WindowingStrategy<T,W extends BoundedWindow> windowingStrategy
Coder<T> coder
ViewFn<PrimitiveViewT,ViewT> viewFn (the ViewFn for this view)
byte[] encodedDefaultValue
Coder<T> valueCoder
boolean hasDefault
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
byte[] encodedDefaultValue
Coder<T> valueCoder
boolean hasDefault
PCollectionViews.TypeDescriptorSupplier<T> typeDescriptorSupplier
Schema schema
java.lang.Object getterTarget
java.util.List<E> getters
java.util.Map<K,V> cache
java.util.List<E> values
java.lang.Object key
int shardNumber
java.lang.String id
boolean generated
java.util.List<E> tupleTags
org.apache.beam.vendor.guava.v32_1_2_jre.com.google.common.reflect.TypeToken<T> token
Coder<T> valueCoder
ByteArrayCoder idCoder
WindowFn<T,W extends BoundedWindow> windowFn
Trigger trigger
WindowingStrategy.AccumulationMode mode
Duration allowedLateness
Window.ClosingBehavior closingBehavior
Window.OnTimeBehavior onTimeBehavior
TimestampCombiner timestampCombiner
java.lang.String environmentId
boolean alreadyMerged
boolean triggerSpecified
boolean modeSpecified
boolean allowedLatenessSpecified
boolean timestampCombinerSpecified