@DefaultSchema(value=AutoValueSchema.class) public abstract static class KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration extends java.lang.Object implements java.io.Serializable
Modifier and Type | Class and Description |
---|---|
static class | KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder |
Constructor and Description |
---|
KafkaWriteSchemaTransformConfiguration() |
Modifier and Type | Method and Description |
---|---|
static KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder | builder() |
abstract java.lang.String | getBootstrapServers() |
abstract java.lang.String | getFormat() |
abstract java.util.Map<java.lang.String,java.lang.String> | getProducerConfigUpdates() |
abstract java.lang.String | getTopic() |
public KafkaWriteSchemaTransformConfiguration()
@SchemaFieldDescription(value="The encoding format for the data stored in Kafka. Valid options are: JSON,AVRO") public abstract java.lang.String getFormat()
public abstract java.lang.String getTopic()
@SchemaFieldDescription(value="A list of host/port pairs to use for establishing the initial connection to the Kafka cluster. The client will make use of all servers irrespective of which servers are specified here for bootstrapping—this list only impacts the initial hosts used to discover the full set of servers. | Format: host1:port1,host2:port2,...") public abstract java.lang.String getBootstrapServers()
@SchemaFieldDescription(value="A list of key-value pairs that act as configuration parameters for Kafka producers. Most of these configurations will not be needed, but if you need to customize your Kafka producer, you may use this. See a detailed list: https://docs.confluent.io/platform/current/installation/configuration/producer-configs.html") @Nullable public abstract java.util.Map<java.lang.String,java.lang.String> getProducerConfigUpdates()
public static KafkaWriteSchemaTransformProvider.KafkaWriteSchemaTransformConfiguration.Builder builder()