protected class TransformTranslator.Context
extends java.lang.Object
| Modifier | Constructor and Description | 
|---|---|
| protected  | Context(org.apache.beam.sdk.runners.AppliedPTransform<InT,OutT,PTransform<InT,OutT>> transform, TranslationContext cxt) | 
| Modifier and Type | Method and Description | 
|---|---|
| <T> org.apache.spark.broadcast.Broadcast<T> | broadcast(T value) | 
| <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> | createDataset(java.util.List<org.apache.beam.sdk.util.WindowedValue<T>> data, org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> enc) | 
| <T> org.apache.spark.sql.Encoder<T> | encoderOf(Coder<T> coder) | 
| org.apache.beam.sdk.runners.AppliedPTransform<InT,OutT,PTransform<InT,OutT>> | getCurrentTransform() | 
| <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> | getDataset(PCollection<T> pCollection) | 
| InT | getInput() | 
| java.util.Map<TupleTag<?>,PCollection<?>> | getInputs() | 
| PipelineOptions | getOptions() | 
| OutT | getOutput() | 
| <T> PCollection<T> | getOutput(TupleTag<T> tag) | 
| java.util.Map<TupleTag<?>,PCollection<?>> | getOutputs() | 
| org.apache.beam.runners.core.construction.SerializablePipelineOptions | getSerializableOptions() | 
| <T> org.apache.spark.sql.Dataset<T> | getSideInputDataset(PCollectionView<?> sideInput) | 
| org.apache.spark.sql.SparkSession | getSparkSession() | 
| <K,V> org.apache.spark.sql.Encoder<K> | keyEncoderOf(KvCoder<K,V> coder) | 
| <K,V> org.apache.spark.sql.Encoder<KV<K,V>> | kvEncoderOf(KvCoder<K,V> coder) | 
| <T> void | putDataset(PCollection<T> pCollection, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> dataset) | 
| <ViewT,ElemT> void | setSideInputDataset(PCollectionView<ViewT> value, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<ElemT>> set) | 
| <T1,T2> org.apache.spark.sql.Encoder<scala.Tuple2<T1,T2>> | tupleEncoder(org.apache.spark.sql.Encoder<T1> e1, org.apache.spark.sql.Encoder<T2> e2) | 
| <K,V> org.apache.spark.sql.Encoder<V> | valueEncoderOf(KvCoder<K,V> coder) | 
| <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> | windowedEncoder(Coder<T> coder) | 
| <T,W extends BoundedWindow> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> | windowedEncoder(Coder<T> coder, Coder<W> windowCoder) | 
| <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> | windowedEncoder(org.apache.spark.sql.Encoder<T> enc) | 
| org.apache.spark.sql.Encoder<BoundedWindow> | windowEncoder() | 
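The snippet below is a minimal usage sketch, assuming it sits inside a TransformTranslator subclass where the protected Context type is visible. The pass-through mapping, the method name and the unchecked casts are illustrative; only the Context methods listed above are taken from this page, and simple names stand for the fully qualified Beam and Spark types shown in the signatures (plus org.apache.spark.api.java.function.MapFunction).

```java
// Minimal sketch of a pass-through translation step; the identity map and the
// unchecked casts are illustrative only.
@SuppressWarnings("unchecked")
<T> void translatePassThrough(Context cxt) {
  PCollection<T> input = (PCollection<T>) cxt.getInput();
  PCollection<T> output = (PCollection<T>) cxt.getOutput();

  // Look up the Dataset previously registered for the input PCollection.
  Dataset<WindowedValue<T>> in = cxt.getDataset(input);

  // Derive an Encoder for windowed values from the PCollection's coder.
  Encoder<WindowedValue<T>> enc = cxt.windowedEncoder(input.getCoder());

  // Apply a (here trivial) Spark transformation and register the result so
  // downstream translators can retrieve it via getDataset(output).
  cxt.putDataset(output, in.map((MapFunction<WindowedValue<T>, WindowedValue<T>>) v -> v, enc));
}
```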
protected Context(org.apache.beam.sdk.runners.AppliedPTransform<InT,OutT,PTransform<InT,OutT>> transform, TranslationContext cxt)
public InT getInput()
public java.util.Map<TupleTag<?>,PCollection<?>> getInputs()
public OutT getOutput()
public <T> PCollection<T> getOutput(TupleTag<T> tag)
public java.util.Map<TupleTag<?>,PCollection<?>> getOutputs()
public org.apache.beam.sdk.runners.AppliedPTransform<InT,OutT,PTransform<InT,OutT>> getCurrentTransform()
public <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> getDataset(PCollection<T> pCollection)
public <T> void putDataset(PCollection<T> pCollection, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> dataset)
public org.apache.beam.runners.core.construction.SerializablePipelineOptions getSerializableOptions()
public PipelineOptions getOptions()
public <ViewT,ElemT> void setSideInputDataset(PCollectionView<ViewT> value, org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<ElemT>> set)
public <T> org.apache.spark.sql.Dataset<T> getSideInputDataset(PCollectionView<?> sideInput)
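A hedged sketch of the side-input bookkeeping: one translation registers the materialized side input, and a later one retrieves it through the same PCollectionView. The method and parameter names outside of Context are hypothetical.

```java
// Sketch only: registering and later retrieving a materialized side input.
// Simple names stand for the fully qualified types in the signatures above.
<ViewT, ElemT> void registerSideInput(
    Context cxt, PCollectionView<ViewT> view, Dataset<WindowedValue<ElemT>> materialized) {
  // Make the side input available to subsequent translations ...
  cxt.setSideInputDataset(view, materialized);

  // ... which can look it up again using the same PCollectionView.
  Dataset<WindowedValue<ElemT>> lookedUp = cxt.getSideInputDataset(view);
}
```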
public <T> org.apache.spark.sql.Dataset<org.apache.beam.sdk.util.WindowedValue<T>> createDataset(java.util.List<org.apache.beam.sdk.util.WindowedValue<T>> data, org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> enc)
public <T> org.apache.spark.broadcast.Broadcast<T> broadcast(T value)
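A hedged sketch of seeding a translation with in-memory elements and sharing a small value with executors. The string elements, StringUtf8Coder and the method name are illustrative; only createDataset, windowedEncoder, broadcast and getSerializableOptions are taken from this page.

```java
// Sketch only: the elements and coder are illustrative. Simple names stand for
// java.util.Arrays/List and the Beam and Spark types in the signatures above.
void seedAndBroadcast(Context cxt) {
  List<WindowedValue<String>> data =
      Arrays.asList(
          WindowedValue.valueInGlobalWindow("a"),
          WindowedValue.valueInGlobalWindow("b"));

  // Build the windowed-value encoder from a Beam coder, then create a Dataset.
  Encoder<WindowedValue<String>> enc = cxt.windowedEncoder(StringUtf8Coder.of());
  Dataset<WindowedValue<String>> seeded = cxt.createDataset(data, enc);

  // Small immutable values (e.g. the pipeline options) can be shared with
  // executors via a Spark broadcast variable.
  Broadcast<SerializablePipelineOptions> options = cxt.broadcast(cxt.getSerializableOptions());
}
```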
public org.apache.spark.sql.SparkSession getSparkSession()
public <T> org.apache.spark.sql.Encoder<T> encoderOf(Coder<T> coder)
public <K,V> org.apache.spark.sql.Encoder<K> keyEncoderOf(KvCoder<K,V> coder)
public <K,V> org.apache.spark.sql.Encoder<V> valueEncoderOf(KvCoder<K,V> coder)
public <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(Coder<T> coder)
public <T> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(org.apache.spark.sql.Encoder<T> enc)
public <T1,T2> org.apache.spark.sql.Encoder<scala.Tuple2<T1,T2>> tupleEncoder(org.apache.spark.sql.Encoder<T1> e1, org.apache.spark.sql.Encoder<T2> e2)
public <T,W extends BoundedWindow> org.apache.spark.sql.Encoder<org.apache.beam.sdk.util.WindowedValue<T>> windowedEncoder(Coder<T> coder, Coder<W> windowCoder)
public org.apache.spark.sql.Encoder<BoundedWindow> windowEncoder()
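The encoder helpers compose, for example when a key-value translation needs separate key, value, pair and window encoders. A minimal sketch, assuming the illustrative VarLongCoder/StringUtf8Coder coders and simple-name imports for the Beam and Spark types shown above; only the Context methods on this page are assumed.

```java
// Sketch only: the concrete coders are illustrative.
void deriveEncoders(Context cxt) {
  KvCoder<Long, String> kvCoder = KvCoder.of(VarLongCoder.of(), StringUtf8Coder.of());

  // Encoders for the key, the value and the KV pair itself.
  Encoder<Long> keyEnc = cxt.keyEncoderOf(kvCoder);
  Encoder<String> valueEnc = cxt.valueEncoderOf(kvCoder);
  Encoder<KV<Long, String>> kvEnc = cxt.kvEncoderOf(kvCoder);

  // Windowed, tuple and window encoders built on top of those.
  Encoder<WindowedValue<KV<Long, String>>> windowedEnc = cxt.windowedEncoder(kvEnc);
  Encoder<scala.Tuple2<Long, String>> pairEnc = cxt.tupleEncoder(keyEnc, valueEnc);
  Encoder<BoundedWindow> winEnc = cxt.windowEncoder();
}
```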