case class KafkaInput(name: String, brokers: String, topic: Option[String], topicPattern: Option[String], consumerGroup: Option[String], options: Option[Map[String, String]], schemaRegistryUrl: Option[String], schemaSubject: Option[String], schemaId: Option[Int], isBatch: Option[Boolean], format: Option[String], incremental: Option[Incremental]) extends IncrementalReader with DatasourceReader with Product with Serializable
Represents a Kafka data source input configuration.
- name: The name of the Kafka input.
- brokers: The Kafka brokers to connect to.
- topic: Optional Kafka topic to subscribe to.
- topicPattern: Optional Kafka topic pattern to subscribe to.
- consumerGroup: Optional Kafka consumer group ID.
- options: Optional additional Kafka options.
- schemaRegistryUrl: Optional URL for the Avro schema registry.
- schemaSubject: Optional subject name for the Avro schema.
- schemaId: Optional ID for the Avro schema.
- isBatch: Optional flag indicating batch processing.
- format: Optional data format for Kafka messages.
- incremental: Optional incremental configuration.
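A minimal construction sketch using the constructor signature above; the broker addresses, topic, consumer group, schema registry URL, option values, and the "avro" format string are placeholder assumptions, not values mandated by the class.

  // Sketch only: a KafkaInput built directly from the case class constructor.
  // "startingOffsets" is a standard Spark Kafka source option; all other
  // literal values below are illustrative placeholders.
  val eventsInput = KafkaInput(
    name = "user-events",
    brokers = "kafka-1:9092,kafka-2:9092",
    topic = Some("user-events"),
    topicPattern = None,
    consumerGroup = Some("etl-consumer"),
    options = Some(Map("startingOffsets" -> "earliest")),
    schemaRegistryUrl = Some("http://schema-registry:8081"),
    schemaSubject = Some("user-events-value"),
    schemaId = None,
    isBatch = Some(true),
    format = Some("avro"), // assumed format value
    incremental = None
  )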
Linear Supertypes: Serializable, Serializable, Product, Equals, DatasourceReader, Reader, IncrementalReader, AnyRef, Any
Instance Constructors
- new KafkaInput(name: String, brokers: String, topic: Option[String], topicPattern: Option[String], consumerGroup: Option[String], options: Option[Map[String, String]], schemaRegistryUrl: Option[String], schemaSubject: Option[String], schemaId: Option[Int], isBatch: Option[Boolean], format: Option[String], incremental: Option[Incremental])
  See the parameter descriptions above.
Value Members
- final def !=(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def ##(): Int
  Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  Definition Classes: Any
- val brokers: String
- def clone(): AnyRef
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()
- val consumerGroup: Option[String]
- final def eq(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- def finalize(): Unit
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( classOf[java.lang.Throwable] )
- val format: Option[String]
- val fromConfluentAvro: Boolean
- final def getClass(): Class[_]
  Definition Classes: AnyRef → Any
  Annotations: @native()
- val incremental: Option[Incremental]
- val isBatch: Option[Boolean]
- final def isInstanceOf[T0]: Boolean
  Definition Classes: Any
- lazy val log: Logger
  Annotations: @transient()
- val name: String
  The name of the data reader.
  Definition Classes: KafkaInput → Reader
- final def ne(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- final def notify(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def notifyAll(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- val options: Option[Map[String, String]]
- def persistIncrementalState(): Unit
  Persists the incremental state, if any.
  Definition Classes: IncrementalReader
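  A usage sketch, assuming the eventsInput value from the construction example above:

  // Sketch: persist the reader's incremental state after a successful run
  // (per the description above, this does nothing if no incremental
  // configuration was supplied).
  eventsInput.persistIncrementalState()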
- def read(sparkSession: SparkSession): DataFrame
  Reads data from the Kafka data source and returns a DataFrame.
  - sparkSession: The SparkSession instance.
  - returns: The DataFrame containing the data from the Kafka data source.
  Definition Classes: KafkaInput → Reader
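  A minimal usage sketch, assuming the eventsInput value from the construction example above and an active SparkSession:

  // Sketch: load the configured Kafka source into a DataFrame and inspect its schema.
  import org.apache.spark.sql.{DataFrame, SparkSession}

  val spark: SparkSession = SparkSession.builder().appName("kafka-input-example").getOrCreate()
  val df: DataFrame = eventsInput.read(spark)
  df.printSchema()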
- def readIncremental(df: DataFrame, incremental: Option[Incremental]): DataFrame
  Reads data from a DataFrame with optional incremental settings.
  - df: The DataFrame to read data from.
  - incremental: Optional Incremental settings to apply.
  - returns: A new DataFrame after applying incremental settings if provided, otherwise the original DataFrame.
  Definition Classes: IncrementalReader
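  A sketch of applying the reader's own incremental settings to an already-loaded DataFrame, reusing df and eventsInput from the examples above; per the description, passing None returns the original DataFrame unchanged.

  // Sketch: apply the reader's optional incremental settings to df.
  val incrementalDf: DataFrame = eventsInput.readIncremental(df, eventsInput.incremental)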
- val schemaId: Option[Int]
- val schemaRegistryUrl: Option[String]
- val schemaSubject: Option[String]
- val subject: String
- final def synchronized[T0](arg0: ⇒ T0): T0
  Definition Classes: AnyRef
- val topic: Option[String]
- val topicPattern: Option[String]
- final def wait(): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()