pyflare.sdk.readers package

Submodules

pyflare.sdk.readers.bigquery_reader module

class pyflare.sdk.readers.bigquery_reader.BigqueryInputReader(read_config: ReadConfig)[source]

Bases: Reader

get_conf()[source]
read()[source]
read_stream()[source]
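
Illustrative usage, assuming a ReadConfig instance has already been built elsewhere in the SDK (its construction is not documented on this page) and that read() returns a Spark DataFrame:

    from pyflare.sdk.readers.bigquery_reader import BigqueryInputReader

    reader = BigqueryInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    df = reader.read()                         # batch read; assumed to return a Spark DataFrame
    df.printSchema()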

pyflare.sdk.readers.delta_reader module

class pyflare.sdk.readers.delta_reader.DeltaInputReader(read_config: ReadConfig)[source]

Bases: FileInputReader

DELTA_CONF = '''[
    ("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"),
    ("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
]'''
get_conf()[source]
read()[source]
read_stream()[source]
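
A hedged sketch of how the DELTA_CONF pairs above could be applied to a Spark session, assuming get_conf() yields (key, value) tuples like those templated in the constant:

    from pyspark.sql import SparkSession
    from pyflare.sdk.readers.delta_reader import DeltaInputReader

    reader = DeltaInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    builder = SparkSession.builder.appName("delta-read")
    for key, value in reader.get_conf():    # assumed to return (key, value) pairs
        builder = builder.config(key, value)
    spark = builder.getOrCreate()
    df = reader.read()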

pyflare.sdk.readers.elasticsearch_reader module

class pyflare.sdk.readers.elasticsearch_reader.ElasticSearchInputReader(read_config: ReadConfig)[source]

Bases: Reader

get_conf()[source]
read()[source]
read_stream()[source]
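
A streaming sketch, assuming read_stream() returns a Spark structured-streaming DataFrame; the console sink is used only for inspection:

    from pyflare.sdk.readers.elasticsearch_reader import ElasticSearchInputReader

    reader = ElasticSearchInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    stream_df = reader.read_stream()                # assumed: streaming DataFrame
    query = stream_df.writeStream.format("console").start()
    query.awaitTermination()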

pyflare.sdk.readers.fastbase_reader module

class pyflare.sdk.readers.fastbase_reader.FastBaseInputReader(read_config: ReadConfig)[source]

Bases: Reader

PULSAR_Options = '''[
    ("service.url", "{serviceUrl}"),
    ("admin.url", "{adminUrl}"),
    ("pulsar.admin.authPluginClassName", "org.apache.pulsar.client.impl.auth.AuthenticationToken"),
    ("pulsar.admin.authParams", "token:{Apikey}"),
    ("pulsar.client.authPluginClassName", "org.apache.pulsar.client.impl.auth.AuthenticationToken"),
    ("pulsar.client.authParams", "token:{Apikey}"),
    ("topic", "persistent://public/default/{dataset}")
]'''
get_conf()[source]
read()[source]
read_stream()[source]
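
PULSAR_Options is a template; a hedged sketch of filling its placeholders (all concrete values below are illustrative, not defaults):

    from pyflare.sdk.readers.fastbase_reader import FastBaseInputReader

    options_str = FastBaseInputReader.PULSAR_Options.format(
        serviceUrl="pulsar://localhost:6650",  # illustrative
        adminUrl="http://localhost:8080",      # illustrative
        Apikey="<api-key>",                    # placeholder; do not hard-code real keys
        dataset="my_dataset",                  # illustrative
    )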

pyflare.sdk.readers.file_reader module

class pyflare.sdk.readers.file_reader.FileInputReader(read_config: ReadConfig)[source]

Bases: Reader

get_conf()[source]
read()[source]
read_stream()[source]
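
FileInputReader is the base class for DeltaInputReader and IcebergInputReader. A hedged sketch, assuming read() registers the dataset as a temporary view under view_name on the reader's Spark session:

    from pyflare.sdk.readers.file_reader import FileInputReader

    reader = FileInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    reader.read()                          # assumed to register a temp view named view_name
    reader.spark.sql(f"SELECT * FROM {reader.view_name}").show()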

pyflare.sdk.readers.iceberg_reader module

class pyflare.sdk.readers.iceberg_reader.IcebergInputReader(read_config: ReadConfig)[source]

Bases: FileInputReader

ICEBERG_CONF = '''[
    ("spark.sql.catalog.{catalog_name}", "org.apache.iceberg.spark.SparkCatalog"),
    ("spark.sql.catalog.{catalog_name}.type", "hadoop"),
    ("spark.sql.catalog.{catalog_name}.warehouse", "{depot_base_path}")
]'''
get_conf()[source]
read()[source]
read_stream()[source]
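
ICEBERG_CONF is likewise a template; a hedged sketch of filling {catalog_name} and {depot_base_path} (both values below are illustrative):

    from pyflare.sdk.readers.iceberg_reader import IcebergInputReader

    conf_str = IcebergInputReader.ICEBERG_CONF.format(
        catalog_name="my_catalog",                      # illustrative
        depot_base_path="s3a://bucket/warehouse/path",  # illustrative
    )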

pyflare.sdk.readers.jdbc_reader module

class pyflare.sdk.readers.jdbc_reader.JDBCInputReader(read_config: ReadConfig)[source]

Bases: Reader

get_conf()[source]
read()[source]
read_stream()[source]
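
Illustrative usage, assuming the ReadConfig carries the JDBC URL and credentials and that read() returns a Spark DataFrame:

    from pyflare.sdk.readers.jdbc_reader import JDBCInputReader

    reader = JDBCInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    df = reader.read()
    df.limit(10).show()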

pyflare.sdk.readers.minerva_reader module

class pyflare.sdk.readers.minerva_reader.MinervaInputReader(read_config: ReadConfig)[source]

Bases: Reader

get_conf()[source]
get_minevra_options()[source]
read()[source]
read_stream()[source]
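
A hedged sketch; get_minevra_options() (spelled as in the source) is assumed to return the Spark options used for the Minerva read:

    from pyflare.sdk.readers.minerva_reader import MinervaInputReader

    reader = MinervaInputReader(read_config)  # read_config: pre-built ReadConfig (assumed)
    options = reader.get_minevra_options()    # assumed: Spark read options
    df = reader.read()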

pyflare.sdk.readers.reader module

class pyflare.sdk.readers.reader.Reader(read_config: ReadConfig)[source]

Bases: object

abstract get_conf()[source]
abstract read()[source]
property read_config: ReadConfig
abstract read_stream()[source]
property spark: SparkSession
property view_name: str
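
Reader is the abstract base: concrete readers implement get_conf(), read(), and read_stream(), and inherit the read_config, spark, and view_name properties. A minimal subclass sketch (CsvInputReader and its behavior are hypothetical):

    from pyflare.sdk.readers.reader import Reader

    class CsvInputReader(Reader):  # hypothetical example subclass
        def get_conf(self):
            return []  # no extra Spark conf needed for this sketch

        def read(self):
            # self.spark and self.read_config are provided by the Reader base
            return self.spark.read.csv("path/to/data.csv", header=True)  # illustrative path

        def read_stream(self):
            raise NotImplementedError("streaming not supported in this sketch")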

pyflare.sdk.readers.snowflake_reader module

class pyflare.sdk.readers.snowflake_reader.SnowflakeInputReader(read_config: ReadConfig)[source]

Bases: Reader

SNOWFLAKE_READ_OPTIONS = '''{{
    "sfURL": "{connection_url}",
    "sfUser": "{connection_user}",
    "sfPassword": "{connection_password}",
    "sfDatabase": "{connection_database}",
    "sfSchema": "{collection}",
    "sfWarehouse": "{connection_warehouse}",
    "dbtable": "{dataset}"
}}'''
get_conf()[source]
read()[source]
read_stream()[source]
spark_options_conf()[source]
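
SNOWFLAKE_READ_OPTIONS is a JSON-shaped template (the doubled braces escape to literal ones). A hedged sketch of filling and parsing it; every value below is illustrative:

    import json

    from pyflare.sdk.readers.snowflake_reader import SnowflakeInputReader

    opts = json.loads(SnowflakeInputReader.SNOWFLAKE_READ_OPTIONS.format(
        connection_url="myaccount.snowflakecomputing.com",  # illustrative
        connection_user="user",
        connection_password="<password>",  # placeholder; do not hard-code
        connection_database="db",
        collection="schema",
        connection_warehouse="wh",
        dataset="table",
    ))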

Module contents