class FlareSession extends AnyRef
The FlareSession class represents a session for running Spark jobs with Flare-specific configurations and capabilities.
- Alphabetic
- By Inheritance
- FlareSession
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
FlareSession(conf: SparkConf, readerFactory: ReaderFactory, writerFactory: WriterFactory, addressResolver: AddressResolver, dataPolicyClient: DataPolicyClient)
- conf
The SparkConf used to configure the SparkSession. If null, a default SparkConf will be used.
- readerFactory
The ReaderFactory used to create data source readers for loading data.
- writerFactory
The WriterFactory used to create data source writers for saving data.
- addressResolver
The AddressResolver used to resolve data addresses.
- dataPolicyClient
The DataPolicyClient used to enforce data governance policies.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
- def getSparkSession: SparkSession
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
load(address: String, format: String = "iceberg", isStream: Boolean = false, datasetInputOptions: Option[DatasetInputOptions] = None, sparkOptions: Option[Map[String, String]] = None): DataFrame
Load data from the specified address and return it as a DataFrame.
Load data from the specified address and return it as a DataFrame. The data is governed using DataGovernor.
- address
The address of the data source to load.
- format
The data format (e.g., "iceberg") to be used for loading data. Defaults to "iceberg".
- isStream
Indicates whether the data source is a streaming source. Default is false.
- datasetInputOptions
Optional DatasetInputOptions for configuring the data source reader.
- sparkOptions
Optional Spark options to be passed to the data source reader.
- returns
DataFrame representing the loaded data.
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
def
save(address: String, df: DataFrame, format: String = "iceberg", datasetOutputOptions: Option[DatasetOutputOptions] = None): StreamingQuery
Save the DataFrame to the specified address using the specified data format and DatasetOutputOptions.
Save the DataFrame to the specified address using the specified data format and DatasetOutputOptions.
- address
The address of the data sink to save the DataFrame.
- df
The DataFrame to be saved.
- format
The data format (e.g., "iceberg") to be used for saving data. Defaults to "iceberg".
- datasetOutputOptions
Optional DatasetOutputOptions for configuring the data source writer.
- returns
StreamingQuery representing the active streaming query if the DataFrame is streaming; otherwise, null.
- var sparkSession: SparkSession
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()