@DefaultSchema(value=AutoValueSchema.class) public abstract class KafkaReadSchemaTransformConfiguration extends java.lang.Object
Internal only: This class is actively being worked on, and it will likely change. We provide no backwards compatibility guarantees, and it should not be implemented outside the Beam repository.
Modifier and Type | Class and Description
---|---
static class | KafkaReadSchemaTransformConfiguration.Builder - Builder for the KafkaReadSchemaTransformConfiguration.
Modifier and Type | Field and Description
---|---
static java.util.Set<java.lang.String> | VALID_DATA_FORMATS
static java.lang.String | VALID_FORMATS_STR
static java.util.Set<java.lang.String> | VALID_START_OFFSET_VALUES
Constructor and Description
---
KafkaReadSchemaTransformConfiguration()
Modifier and Type | Method and Description
---|---
static KafkaReadSchemaTransformConfiguration.Builder | builder() - Instantiates a KafkaReadSchemaTransformConfiguration.Builder instance.
abstract java.lang.String | getAutoOffsetResetConfig()
abstract java.lang.String | getBootstrapServers() - The bootstrap servers for the Kafka consumer.
abstract java.lang.String | getConfluentSchemaRegistrySubject()
abstract java.lang.String | getConfluentSchemaRegistryUrl()
abstract java.util.Map<java.lang.String,java.lang.String> | getConsumerConfigUpdates()
abstract ErrorHandling | getErrorHandling()
abstract java.lang.String | getFileDescriptorPath()
abstract java.lang.String | getFormat()
abstract java.lang.Integer | getMaxReadTimeSeconds()
abstract java.lang.String | getMessageName()
abstract java.lang.String | getSchema()
abstract java.lang.String | getTopic() - The topic from which to read.
void | validate()
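To show how these pieces fit together, a minimal construction sketch built around builder(). The Builder setter names (setBootstrapServers, setTopic, setFormat, setAutoOffsetResetConfig) are assumptions that mirror the abstract getters above, as is typical for AutoValue builders; they are not documented on this page.

```java
// Hypothetical usage sketch: assumes the AutoValue Builder exposes setters
// mirroring the abstract getters listed above. Verify the exact names against
// KafkaReadSchemaTransformConfiguration.Builder's own documentation.
KafkaReadSchemaTransformConfiguration config =
    KafkaReadSchemaTransformConfiguration.builder()
        .setBootstrapServers("broker1:9092,broker2:9092") // host:port pairs, comma-separated
        .setTopic("events")                               // topic to read from
        .setFormat("RAW")                                 // one of RAW, AVRO, JSON, PROTO
        .setAutoOffsetResetConfig("earliest")             // earliest | latest | none
        .build();
```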
public static final java.util.Set<java.lang.String> VALID_START_OFFSET_VALUES
public static final java.lang.String VALID_FORMATS_STR
public static final java.util.Set<java.lang.String> VALID_DATA_FORMATS
public KafkaReadSchemaTransformConfiguration()
public void validate()
public static KafkaReadSchemaTransformConfiguration.Builder builder()
Instantiates a KafkaReadSchemaTransformConfiguration.Builder instance.

@SchemaFieldDescription(value="A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping\u2014this list only impacts the initial hosts used to discover the full set of servers. This list should be in the form `host1:port1,host2:port2,...`") public abstract java.lang.String getBootstrapServers()
@Nullable public abstract java.lang.String getConfluentSchemaRegistryUrl()
@SchemaFieldDescription(value="The encoding format for the data stored in Kafka. Valid options are: RAW,AVRO,JSON,PROTO") @Nullable public abstract java.lang.String getFormat()
@Nullable public abstract java.lang.String getConfluentSchemaRegistrySubject()
@SchemaFieldDescription(value="The schema in which the data is encoded in the Kafka topic. For AVRO data, this is a schema defined with AVRO schema syntax (https://avro.apache.org/docs/1.10.2/spec.html#schemas). For JSON data, this is a schema defined with JSON-schema syntax (https://json-schema.org/). If a URL to Confluent Schema Registry is provided, then this field is ignored, and the schema is fetched from Confluent Schema Registry.") @Nullable public abstract java.lang.String getSchema()
@SchemaFieldDescription(value="The path to the Protocol Buffer File Descriptor Set file. This file is used for schema definition and message serialization.") @Nullable public abstract java.lang.String getFileDescriptorPath()
@SchemaFieldDescription(value="The name of the Protocol Buffer message to be used for schema extraction and data conversion.") @Nullable public abstract java.lang.String getMessageName()
@SchemaFieldDescription(value="What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. (1) earliest: automatically reset the offset to the earliest offset. (2) latest: automatically reset the offset to the latest offset (3) none: throw exception to the consumer if no previous offset is found for the consumer\u2019s group") @Nullable public abstract java.lang.String getAutoOffsetResetConfig()
@SchemaFieldDescription(value="A list of key-value pairs that act as configuration parameters for Kafka consumers. Most of these configurations will not be needed, but if you need to customize your Kafka consumer, you may use this. See a detailed list: https://docs.confluent.io/platform/current/installation/configuration/consumer-configs.html") @Nullable public abstract java.util.Map<java.lang.String,java.lang.String> getConsumerConfigUpdates()
public abstract java.lang.String getTopic()
@SchemaFieldDescription(value="Upper bound of how long to read from Kafka.") @Nullable public abstract java.lang.Integer getMaxReadTimeSeconds()
@SchemaFieldDescription(value="This option specifies whether and where to output unwritable rows.") @Nullable public abstract ErrorHandling getErrorHandling()