protected class TransformTranslator.Context
extends java.lang.Object
Context used to translate a PTransform. The context is backed by the shared PipelineTranslator.TranslationState of the PipelineTranslator.

Nested classes/interfaces inherited from interface EncoderProvider: EncoderProvider.Factory&lt;T&gt;
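To show how these pieces fit together, here is a minimal, hypothetical sketch of a translate override consuming this Context. The transform type MyStringTransform, the identity mapping, and the assumption of a single main input and output of type PCollection&lt;String&gt; are illustrative only, not part of the Beam API.

```java
import java.io.IOException;

import org.apache.beam.sdk.util.WindowedValue;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;

// Hypothetical override in a concrete TransformTranslator subclass whose
// InT and OutT are both PCollection<String>.
@Override
protected void translate(MyStringTransform transform, Context cxt) throws IOException {
  // Resolve the Dataset previously registered for the input PCollection.
  Dataset<WindowedValue<String>> input = cxt.getDataset(cxt.getInput());

  // Derive a Spark Encoder for the windowed output values from the output's Beam coder.
  Encoder<WindowedValue<String>> enc = cxt.windowedEncoder(cxt.getOutput().getCoder());

  // Apply the Spark-side transformation (identity here, as a placeholder).
  Dataset<WindowedValue<String>> result =
      input.map((MapFunction<WindowedValue<String>, WindowedValue<String>>) wv -> wv, enc);

  // Register the result so downstream translators can resolve it via getDataset.
  cxt.putDataset(cxt.getOutput(), result);
}
```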
Modifier and Type | Method and Description |
---|---|
&lt;T&gt; org.apache.spark.broadcast.Broadcast&lt;T&gt; | broadcast(T value) |
&lt;T&gt; org.apache.spark.sql.Dataset&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; | createDataset(java.util.List&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; data, org.apache.spark.sql.Encoder&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; enc) |
&lt;T&gt; org.apache.spark.sql.Encoder&lt;T&gt; | encoderOf(Coder&lt;T&gt; coder, EncoderProvider.Factory&lt;T&gt; factory) |
org.apache.beam.sdk.runners.AppliedPTransform&lt;InT,OutT,PTransform&lt;InT,OutT&gt;&gt; | getCurrentTransform() |
&lt;T&gt; org.apache.spark.sql.Dataset&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; | getDataset(PCollection&lt;T&gt; pCollection) |
InT | getInput() |
java.util.Map&lt;TupleTag&lt;?&gt;,PCollection&lt;?&gt;&gt; | getInputs() |
PipelineOptions | getOptions() |
OutT | getOutput() |
&lt;T&gt; PCollection&lt;T&gt; | getOutput(TupleTag&lt;T&gt; tag) |
java.util.Map&lt;TupleTag&lt;?&gt;,PCollection&lt;?&gt;&gt; | getOutputs() |
org.apache.beam.runners.core.construction.SerializablePipelineOptions | getSerializableOptions() |
org.apache.spark.sql.SparkSession | getSparkSession() |
default &lt;T&gt; void | putDataset(PCollection&lt;T&gt; pCollection, org.apache.spark.sql.Dataset&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; dataset) |
&lt;T&gt; void | putDataset(PCollection&lt;T&gt; pCollection, org.apache.spark.sql.Dataset&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; dataset, boolean cache) |
&lt;T1,T2&gt; org.apache.spark.sql.Encoder&lt;scala.Tuple2&lt;T1,T2&gt;&gt; | tupleEncoder(org.apache.spark.sql.Encoder&lt;T1&gt; e1, org.apache.spark.sql.Encoder&lt;T2&gt; e2) |
&lt;T&gt; org.apache.spark.sql.Encoder&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; | windowedEncoder(Coder&lt;T&gt; coder) |
&lt;T,W extends BoundedWindow&gt; org.apache.spark.sql.Encoder&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; | windowedEncoder(Coder&lt;T&gt; coder, Coder&lt;W&gt; windowCoder) |
&lt;T&gt; org.apache.spark.sql.Encoder&lt;org.apache.beam.sdk.util.WindowedValue&lt;T&gt;&gt; | windowedEncoder(org.apache.spark.sql.Encoder&lt;T&gt; enc) |
org.apache.spark.sql.Encoder&lt;BoundedWindow&gt; | windowEncoder() |
Methods inherited from class java.lang.Object: clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface EncoderProvider: encoderFactory, encoderOf, keyEncoderOf, kvEncoderOf, valueEncoderOf
public InT getInput()
public java.util.Map<TupleTag<?>,PCollection<?>> getInputs()
public java.util.Map<TupleTag<?>,PCollection<?>> getOutputs()
public OutT getOutput()
public <T> PCollection<T> getOutput(TupleTag<T> tag)
public org.apache.beam.sdk.runners.AppliedPTransform<InT,OutT,PTransform<InT,OutT>> getCurrentTransform()
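For multi-output transforms, the tagged accessors complement getOutput(). A short sketch, assuming cxt is this Context; the tags mainTag and errorsTag are hypothetical placeholders for tags the transform actually registered.

```java
import java.util.Map;

import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TupleTag;

// Hypothetical tags; in practice these would come from the transform itself.
TupleTag<String> mainTag = new TupleTag<String>() {};
TupleTag<String> errorsTag = new TupleTag<String>() {};

PCollection<String> main = cxt.getOutput(mainTag);
PCollection<String> errors = cxt.getOutput(errorsTag);

// All tagged inputs and outputs of the applied transform are also available as maps.
Map<TupleTag<?>, PCollection<?>> outputs = cxt.getOutputs();
Map<TupleTag<?>, PCollection<?>> inputs = cxt.getInputs();
```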
public <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> getDataset(PCollection<T> pCollection)
public <T> void putDataset(PCollection<T> pCollection, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> dataset, boolean cache)
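The three-argument overload exposes caching as an explicit choice. A hedged sketch, assuming cxt, an output PCollection, and a computed result Dataset from a surrounding translation, and assuming the boolean controls whether the Dataset may be cached for reuse.

```java
// Register the result; the boolean presumably toggles caching of the Dataset
// (useful when the output PCollection is consumed by several downstream
// transforms). The two-argument overload leaves that decision to the context.
cxt.putDataset(output, result, true);
```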
public org.apache.beam.runners.core.construction.SerializablePipelineOptions getSerializableOptions()
public PipelineOptions getOptions()
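A small sketch of the two option accessors, assuming cxt is this Context: getOptions() for driver-side decisions, getSerializableOptions() when options need to travel with code executed on Spark workers.

```java
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.sdk.options.StreamingOptions;

// Driver side: read a standard option directly.
boolean streaming = cxt.getOptions().as(StreamingOptions.class).isStreaming();

// Executor side: capture the serializable wrapper and unwrap it lazily inside
// the function that runs on the workers, e.g.
// serializableOptions.get().as(StreamingOptions.class).
SerializablePipelineOptions serializableOptions = cxt.getSerializableOptions();
```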
public <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> createDataset(java.util.List<org.apache.beam.sdk.util.WindowedValue<T>> data, org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> enc)
public <T> org.apache.spark.broadcast.Broadcast<T> broadcast(T value)
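A sketch of materializing a small in-memory Dataset and broadcasting a value to the executors, assuming cxt is this Context; the literal values are placeholders.

```java
import java.util.Arrays;

import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.sdk.coders.VarIntCoder;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;

// Encoder for windowed ints, derived from the Beam coder.
Encoder<WindowedValue<Integer>> enc = cxt.windowedEncoder(VarIntCoder.of());

// Materialize already-windowed values as a Dataset.
Dataset<WindowedValue<Integer>> constants =
    cxt.createDataset(
        Arrays.asList(
            WindowedValue.valueInGlobalWindow(1),
            WindowedValue.valueInGlobalWindow(2)),
        enc);

// Broadcast a small serializable value so it is available on every executor.
Broadcast<SerializablePipelineOptions> broadcastOptions =
    cxt.broadcast(cxt.getSerializableOptions());
```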
public org.apache.spark.sql.SparkSession getSparkSession()
public <T> org.apache.spark.sql.Encoder<T> encoderOf(Coder<T> coder, EncoderProvider.Factory<T> factory)
public <T1,T2> org.apache.spark.sql.Encoder<scala.Tuple2<T1,T2>> tupleEncoder(org.apache.spark.sql.Encoder<T1> e1, org.apache.spark.sql.Encoder<T2> e2)
public <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(Coder<T> coder)
public <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(org.apache.spark.sql.Encoder<T> enc)
public <T,W extends BoundedWindow> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(Coder<T> coder, Coder<W> windowCoder)
public org.apache.spark.sql.Encoder<BoundedWindow> windowEncoder()
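The encoder helpers compose. A sketch assuming cxt is this Context, with GlobalWindow.Coder.INSTANCE standing in for a concrete window coder.

```java
import org.apache.beam.sdk.coders.VarLongCoder;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.spark.sql.Encoder;

// Windowed-value encoder derived from a Beam coder.
Encoder<WindowedValue<Long>> valueEnc = cxt.windowedEncoder(VarLongCoder.of());

// Variant with an explicit window coder (here the global window's coder).
Encoder<WindowedValue<Long>> globalEnc =
    cxt.windowedEncoder(VarLongCoder.of(), GlobalWindow.Coder.INSTANCE);

// Encoder for bare windows, and a Tuple2 encoder combining the two.
Encoder<BoundedWindow> windowEnc = cxt.windowEncoder();
Encoder<scala.Tuple2<BoundedWindow, WindowedValue<Long>>> pairEnc =
    cxt.tupleEncoder(windowEnc, valueEnc);
```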
public <T> void putDataset(PCollection<T> pCollection, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> dataset)