case class EventHubInput(name: String, endpoint: String, eventHubName: String, sasKeyName: String, sasKey: String, options: Option[Map[String, String]], isBatch: Option[Boolean], incremental: Option[Incremental]) extends IncrementalReader with DatasourceReader with Product with Serializable
Case class representing an Event Hub input configuration. A construction sketch follows the parameter list.
- name: The name of the Event Hub input.
- endpoint: The Event Hub endpoint.
- eventHubName: The Event Hub name.
- sasKeyName: The SAS key name used for authentication.
- sasKey: The SAS key used for authentication.
- options: Optional additional options for reading from Event Hub.
- isBatch: Optional flag indicating whether batch processing should be used.
- incremental: Optional incremental configuration.
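For illustration, constructing this case class might look like the following sketch; every literal value (name, endpoint, policy, option) is a hypothetical placeholder, not taken from this documentation.

```scala
// Hypothetical construction; all literal values are placeholders.
val input = EventHubInput(
  name         = "telemetry-input",
  endpoint     = "sb://my-namespace.servicebus.windows.net/",
  eventHubName = "telemetry-hub",
  sasKeyName   = "listen-policy",
  sasKey       = sys.env("EVENTHUB_SAS_KEY"),
  options      = Some(Map("maxEventsPerTrigger" -> "10000")), // example option only
  isBatch      = Some(false),   // stream rather than batch in this example
  incremental  = None
)
```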
Linear Supertypes
Serializable, Serializable, Product, Equals, DatasourceReader, Reader, IncrementalReader, AnyRef, Any
Instance Constructors
- new EventHubInput(name: String, endpoint: String, eventHubName: String, sasKeyName: String, sasKey: String, options: Option[Map[String, String]], isBatch: Option[Boolean], incremental: Option[Incremental])
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- val endpoint: String
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- val eventHubName: String
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- val incremental: Option[Incremental]
- val isBatch: Option[Boolean]
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- lazy val log: Logger
  - Annotations: @transient()
- val name: String
  The name of the data reader.
  - Definition Classes: EventHubInput → Reader
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- val options: Option[Map[String, String]]
- def persistIncrementalState(): Unit
  Persists the incremental state, if any.
  - Definition Classes: IncrementalReader
- def read(sparkSession: SparkSession): DataFrame
  Reads data from Event Hub; a dispatch sketch follows this entry.
  - sparkSession: The SparkSession instance.
  - returns: The DataFrame containing the read data.
  - Definition Classes: EventHubInput → Reader
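The implementation of read is not shown in this documentation; a minimal sketch, assuming it simply dispatches on the isBatch flag (streaming by default), would be:

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}

// Sketch only: assumed dispatch inside EventHubInput; not confirmed by the source.
def read(sparkSession: SparkSession): DataFrame =
  if (isBatch.getOrElse(false)) readBatch(sparkSession)
  else readStream(sparkSession)
```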
- def readBatch(sparkSession: SparkSession): DataFrame
  Reads data from Event Hub in batch mode; see the connector sketch below.
  - sparkSession: The SparkSession instance.
  - returns: The DataFrame containing the read data.
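A minimal sketch of a batch read, assuming EventHubInput delegates to the azure-eventhubs-spark connector (org.apache.spark.eventhubs) and that endpoint holds the namespace name; the connector and option handling actually used are not confirmed by this documentation.

```scala
import org.apache.spark.eventhubs.{ConnectionStringBuilder, EventHubsConf}
import org.apache.spark.sql.{DataFrame, SparkSession}

// Sketch only: batch read via the azure-eventhubs-spark connector (an assumption).
def readBatch(sparkSession: SparkSession): DataFrame = {
  val connectionString = ConnectionStringBuilder()
    .setNamespaceName(endpoint)      // assumption: `endpoint` is the namespace name
    .setEventHubName(eventHubName)
    .setSasKeyName(sasKeyName)
    .setSasKey(sasKey)
    .build

  sparkSession.read
    .format("eventhubs")
    .options(EventHubsConf(connectionString).toMap ++ options.getOrElse(Map.empty))
    .load()
}
```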
- def readIncremental(df: DataFrame, incremental: Option[Incremental]): DataFrame
  Reads data from a DataFrame with optional incremental settings; a usage sketch follows this entry.
  - df: The DataFrame to read data from.
  - incremental: Optional Incremental settings to apply.
  - returns: A new DataFrame with incremental settings applied if provided, otherwise the original DataFrame.
  - Definition Classes: IncrementalReader
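A usage sketch of the incremental contract, combining read, readIncremental, and persistIncrementalState; here input is the instance from the construction sketch above and spark is an existing SparkSession, both assumed to be in scope.

```scala
import org.apache.spark.sql.DataFrame

// Usage sketch: read, apply this input's optional incremental settings, then
// persist the incremental state once processing has succeeded.
val rawDf: DataFrame = input.read(spark)
val incDf: DataFrame = input.readIncremental(rawDf, input.incremental)
// ... transform / write incDf ...
input.persistIncrementalState()
```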
- def readStream(sparkSession: SparkSession): DataFrame
  Reads data from Event Hub in streaming mode; see the streaming sketch below.
  - sparkSession: The SparkSession instance.
  - returns: The DataFrame containing the read data.
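Under the same connector assumption as the readBatch sketch, a streaming read would differ only in using sparkSession.readStream:

```scala
import org.apache.spark.eventhubs.{ConnectionStringBuilder, EventHubsConf}
import org.apache.spark.sql.{DataFrame, SparkSession}

// Sketch only: streaming read via the assumed azure-eventhubs-spark connector.
def readStream(sparkSession: SparkSession): DataFrame = {
  val connectionString = ConnectionStringBuilder()
    .setNamespaceName(endpoint)      // same assumption as in the readBatch sketch
    .setEventHubName(eventHubName)
    .setSasKeyName(sasKeyName)
    .setSasKey(sasKey)
    .build

  sparkSession.readStream
    .format("eventhubs")
    .options(EventHubsConf(connectionString).toMap ++ options.getOrElse(Map.empty))
    .load()
}
```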
- val sasKey: String
- val sasKeyName: String
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()