public class WrappedSupervisor
extends org.apache.spark.streaming.receiver.ReceiverSupervisor

A ReceiverSupervisor that doesn't use Spark Environment.

Constructor and Description |
---|
WrappedSupervisor(org.apache.spark.streaming.receiver.Receiver<?> receiver, org.apache.spark.SparkConf conf, SerializableFunction<java.lang.Object[],java.lang.Void> storeFn) |
Modifier and Type | Method and Description |
---|---|
org.apache.spark.streaming.receiver.BlockGenerator | createBlockGenerator(org.apache.spark.streaming.receiver.BlockGeneratorListener blockGeneratorListener) |
long | getCurrentRateLimit() |
boolean | isReceiverStopped() |
void | logInfo(scala.Function0<java.lang.String> msg) |
boolean | onReceiverStart() |
void | pushArrayBuffer(scala.collection.mutable.ArrayBuffer<?> arrayBuffer, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1) |
void | pushBytes(java.nio.ByteBuffer byteBuffer, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1) |
void | pushIterator(scala.collection.Iterator<?> iterator, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1) |
void | pushSingle(java.lang.Object o) |
void | reportError(java.lang.String s, java.lang.Throwable throwable) |
Methods inherited from class org.apache.spark.streaming.receiver.ReceiverSupervisor |
---|
awaitTermination, initializeForcefully, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, isReceiverStarted, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logName, logTrace, logTrace, logWarning, logWarning, onReceiverStop, onStart, onStop, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, receiverState_$eq, receiverState, ReceiverState, restartReceiver, restartReceiver, restartReceiver$default$2, start, startReceiver, stop, stoppingError_$eq, stoppingError, stopReceiver, streamId |
public WrappedSupervisor(org.apache.spark.streaming.receiver.Receiver<?> receiver, org.apache.spark.SparkConf conf, SerializableFunction<java.lang.Object[],java.lang.Void> storeFn)
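The constructor is the whole integration point: a standard Spark Receiver, a SparkConf, and a storeFn callback that stands in for Spark's block storage. Below is a minimal sketch of how the pieces might be wired together. It assumes SerializableFunction is Beam's org.apache.beam.sdk.transforms.SerializableFunction, that constructing the supervisor attaches it to the receiver (as Spark's ReceiverSupervisor base class does), and that the push* methods forward each stored payload to storeFn; SingleValueReceiver and the surrounding class are made up for illustration.

```java
import java.util.ArrayList;
import java.util.List;

import org.apache.beam.sdk.transforms.SerializableFunction; // assumed package
import org.apache.spark.SparkConf;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.receiver.Receiver;
// WrappedSupervisor is the class documented on this page; import it from its own package.

public class WrappedSupervisorSketch {

  // Hypothetical receiver that stores a single record when started.
  static class SingleValueReceiver extends Receiver<String> {
    SingleValueReceiver() {
      super(StorageLevel.MEMORY_ONLY());
    }

    @Override
    public void onStart() {
      store("hello"); // routed to the attached supervisor's pushSingle(...)
    }

    @Override
    public void onStop() {}
  }

  public static void main(String[] args) {
    List<Object> collected = new ArrayList<>();

    // storeFn receives the arguments of a push* call as an Object[];
    // here we only keep the first element (assumed to be the pushed payload).
    SerializableFunction<Object[], Void> storeFn =
        pushArgs -> {
          collected.add(pushArgs[0]);
          return null;
        };

    Receiver<String> receiver = new SingleValueReceiver();
    WrappedSupervisor supervisor =
        new WrappedSupervisor(receiver, new SparkConf(), storeFn);

    // start() is inherited from ReceiverSupervisor and eventually calls
    // receiver.onStart(), so the stored record should end up in `collected`.
    supervisor.start();
    System.out.println(collected);
  }
}
```

In real use the receiver would run asynchronously and storeFn would hand records to whatever is consuming them; the synchronous single record here only shows the wiring.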
public void pushSingle(java.lang.Object o)
Specified by: pushSingle in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public void pushBytes(java.nio.ByteBuffer byteBuffer, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1)
Specified by: pushBytes in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public void pushIterator(scala.collection.Iterator<?> iterator, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1)
Specified by: pushIterator in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public void pushArrayBuffer(scala.collection.mutable.ArrayBuffer<?> arrayBuffer, scala.Option<java.lang.Object> option, scala.Option<org.apache.spark.storage.StreamBlockId> option1)
Specified by: pushArrayBuffer in class org.apache.spark.streaming.receiver.ReceiverSupervisor
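All four push* overloads funnel into the one storeFn supplied at construction, so a callback that needs to tell them apart has only the runtime types of the pushed arguments to go on. The sketch below is speculative in one respect: it assumes the pushed payload is the first element of the Object[] handed to storeFn, which the signatures above do not guarantee.

```java
import org.apache.beam.sdk.transforms.SerializableFunction; // assumed package

public class StoreFnDispatchSketch {

  // Hypothetical storeFn that dispatches on the runtime type of the pushed
  // payload (assumed to be the first element of the Object[]).
  static SerializableFunction<Object[], Void> dispatchingStoreFn() {
    return pushArgs -> {
      Object payload = pushArgs[0];
      String kind;
      if (payload instanceof java.nio.ByteBuffer) {
        kind = "pushBytes: pre-serialized block of records";
      } else if (payload instanceof scala.collection.Iterator) {
        kind = "pushIterator: batch of records as a Scala iterator";
      } else if (payload instanceof scala.collection.mutable.ArrayBuffer) {
        kind = "pushArrayBuffer: buffered batch of records";
      } else {
        kind = "pushSingle: a single record";
      }
      System.out.println(kind);
      return null;
    };
  }
}
```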
public org.apache.spark.streaming.receiver.BlockGenerator createBlockGenerator(org.apache.spark.streaming.receiver.BlockGeneratorListener blockGeneratorListener)
Specified by: createBlockGenerator in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public void reportError(java.lang.String s, java.lang.Throwable throwable)
Specified by: reportError in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public boolean onReceiverStart()
Specified by: onReceiverStart in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public long getCurrentRateLimit()
Overrides: getCurrentRateLimit in class org.apache.spark.streaming.receiver.ReceiverSupervisor
public boolean isReceiverStopped()
Overrides: isReceiverStopped in class org.apache.spark.streaming.receiver.ReceiverSupervisor
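isReceiverStopped() reflects the supervisor's receiver state after a shutdown. A small continuation of the earlier hypothetical wiring, assuming Receiver.stop(String) delegates to the attached supervisor as Spark's Receiver does:

```java
import org.apache.spark.streaming.receiver.Receiver;
// WrappedSupervisor is the class documented on this page; import it from its own package.

public class ShutdownSketch {

  // Continuing the hypothetical wiring from the constructor example: stop the
  // receiver through its attached supervisor, then inspect the supervisor.
  static void stopAndCheck(Receiver<?> receiver, WrappedSupervisor supervisor) {
    receiver.stop("example finished");                  // delegates to the attached supervisor
    System.out.println(supervisor.isReceiverStopped()); // expected: true once stopped
  }
}
```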
public void logInfo(scala.Function0<java.lang.String> msg)
Specified by: logInfo in interface org.apache.spark.internal.Logging
Overrides: logInfo in class org.apache.spark.streaming.receiver.ReceiverSupervisor