pyflare.sdk.writers package

Submodules

pyflare.sdk.writers.bigquery_writer module

class pyflare.sdk.writers.bigquery_writer.BigqueryOutputWriter(write_config: WriteConfig)[source]

Bases: Writer

get_conf()[source]
write(df)[source]
write_stream()[source]
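
A minimal sketch of the kind of batch write write(df) performs, assuming the spark-bigquery-connector is available on the Spark classpath; the project, dataset, table, and staging bucket below are hypothetical placeholders, not values from the SDK:

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.appName("bq-write-sketch").getOrCreate()
   df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "val"])

   # Batch write of the kind BigqueryOutputWriter.write(df) wraps.
   (df.write.format("bigquery")
       .option("table", "my_project.my_dataset.my_table")  # hypothetical target table
       .option("temporaryGcsBucket", "my-staging-bucket")  # hypothetical staging bucket
       .mode("append")
       .save())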

pyflare.sdk.writers.delta_writer module

class pyflare.sdk.writers.delta_writer.DeltaOutputWriter(write_config: WriteConfig)[source]

Bases: FileOutputWriter

DELTA_CONF = '''[
            ("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.delta.catalog.DeltaCatalog"),
            ("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
        ]'''
get_conf()[source]
write(df)[source]
write_stream()[source]
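
DELTA_CONF carries the two standard Delta Lake session settings. A minimal sketch of how these settings enable a Delta write in plain PySpark (the output path is a placeholder; this illustrates the configuration, not the SDK's own call path):

   from pyspark.sql import SparkSession

   # The two entries from DELTA_CONF, applied at session build time.
   spark = (SparkSession.builder
       .config("spark.sql.catalog.spark_catalog",
               "org.apache.spark.sql.delta.catalog.DeltaCatalog")
       .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
       .getOrCreate())

   df = spark.createDataFrame([(1, "a")], ["id", "val"])
   df.write.format("delta").mode("overwrite").save("/tmp/example_delta")  # placeholder path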

pyflare.sdk.writers.elasticsearch_writer module

class pyflare.sdk.writers.elasticsearch_writer.ElasticSearchOutputWriter(write_config: WriteConfig)[source]

Bases: Writer

get_conf()[source]
write(df)[source]
write_stream()[source]
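
A minimal sketch of the kind of batch write write(df) performs, assuming the elasticsearch-hadoop connector is on the classpath; the host, port, and index name are hypothetical placeholders:

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.getOrCreate()
   df = spark.createDataFrame([(1, "a")], ["id", "val"])

   # Batch write of the kind ElasticSearchOutputWriter.write(df) wraps.
   (df.write.format("org.elasticsearch.spark.sql")
       .option("es.nodes", "localhost")  # hypothetical host
       .option("es.port", "9200")        # hypothetical port
       .mode("append")
       .save("my-index"))                # hypothetical index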

pyflare.sdk.writers.fastbase_writer module

class pyflare.sdk.writers.fastbase_writer.FastBaseOutputWriter(write_config: WriteConfig)[source]

Bases: FileOutputWriter

PULSAR_Options = '''[
            ("service.url", "{serviceUrl}"),
            ("admin.url", "{adminUrl}"),
            ("pulsar.admin.authPluginClassName", "org.apache.pulsar.client.impl.auth.AuthenticationToken"),
            ("pulsar.admin.authParams", "token:{Apikey}"),
            ("pulsar.client.authPluginClassName", "org.apache.pulsar.client.impl.auth.AuthenticationToken"),
            ("pulsar.client.authParams", "token:{Apikey}"),
            ("topic", "persistent://public/default/{dataset}")
        ]'''
get_conf()[source]
write(df)[source]
write_stream()[source]
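
PULSAR_Options is a template whose serviceUrl, adminUrl, Apikey, and dataset fields are filled in at runtime. A minimal sketch of a batch write with the pulsar-spark connector using the same option keys (every value below is a placeholder, not an SDK default):

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.getOrCreate()
   df = spark.createDataFrame([("k", "v")], ["key", "value"])

   # Option keys mirror PULSAR_Options; all values are placeholders.
   (df.write.format("pulsar")
       .option("service.url", "pulsar://localhost:6650")
       .option("admin.url", "http://localhost:8080")
       .option("pulsar.client.authPluginClassName",
               "org.apache.pulsar.client.impl.auth.AuthenticationToken")
       .option("pulsar.client.authParams", "token:<api-key>")
       .option("topic", "persistent://public/default/my_dataset")
       .save())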

pyflare.sdk.writers.file_writer module

class pyflare.sdk.writers.file_writer.FileOutputWriter(write_config: WriteConfig)[source]

Bases: Writer

get_conf()[source]
write(df)[source]
write_csv()[source]
write_json()[source]
write_parquet()[source]
write_stream()[source]
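
write_csv(), write_json(), and write_parquet() correspond to the three file formats the writer supports. A minimal sketch of equivalent writes in plain PySpark (output paths are placeholders):

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.getOrCreate()
   df = spark.createDataFrame([(1, "a")], ["id", "val"])

   # Plain-Spark equivalents of the three format-specific helpers.
   df.write.mode("overwrite").csv("/tmp/out_csv", header=True)
   df.write.mode("overwrite").json("/tmp/out_json")
   df.write.mode("overwrite").parquet("/tmp/out_parquet")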

pyflare.sdk.writers.iceberg_writer module

class pyflare.sdk.writers.iceberg_writer.IcebergOutputWriter(write_config: WriteConfig)[source]

Bases: FileOutputWriter

ICEBERG_CONF = '''[
            ("spark.sql.catalog.{catalog_name}", "org.apache.iceberg.spark.SparkCatalog"),
            ("spark.sql.catalog.{catalog_name}.type", "hadoop"),
            ("spark.sql.catalog.{catalog_name}.warehouse", "{depot_base_path}")
        ]'''
get_conf()[source]
write(df)[source]
write_stream()[source]
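
ICEBERG_CONF is a template that wires up a Hadoop-type Iceberg catalog; catalog_name and depot_base_path are filled in at runtime. A minimal sketch with placeholder values substituted, followed by a DataFrameWriterV2 write (the catalog, warehouse path, and table name are hypothetical):

   from pyspark.sql import SparkSession

   # The three entries from ICEBERG_CONF with placeholder values.
   spark = (SparkSession.builder
       .config("spark.sql.catalog.my_catalog", "org.apache.iceberg.spark.SparkCatalog")
       .config("spark.sql.catalog.my_catalog.type", "hadoop")
       .config("spark.sql.catalog.my_catalog.warehouse", "/tmp/iceberg_warehouse")
       .getOrCreate())

   df = spark.createDataFrame([(1, "a")], ["id", "val"])
   df.writeTo("my_catalog.db.example").createOrReplace()  # placeholder table name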

pyflare.sdk.writers.jdbc_writer module

class pyflare.sdk.writers.jdbc_writer.JDBCOutputWriter(write_config: WriteConfig)[source]

Bases: Writer

get_conf()[source]
write(df)[source]
write_stream()[source]
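
A minimal sketch of the kind of batch write write(df) performs via Spark's built-in JDBC source; the URL, table, and credentials are hypothetical placeholders:

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.getOrCreate()
   df = spark.createDataFrame([(1, "a")], ["id", "val"])

   # Batch write of the kind JDBCOutputWriter.write(df) wraps.
   (df.write.format("jdbc")
       .option("url", "jdbc:postgresql://localhost:5432/mydb")  # hypothetical URL
       .option("dbtable", "public.example")                     # hypothetical table
       .option("user", "user")
       .option("password", "password")
       .mode("append")
       .save())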

pyflare.sdk.writers.snowflake_writer module

class pyflare.sdk.writers.snowflake_writer.SnowflakeOutputWriter(write_config: WriteConfig)[source]

Bases: Writer

SNOWFLAKE_WRITE_OPTIONS = '''{{
                "sfURL": "{connection_url}",
                "sfUser": "{connection_user}",
                "sfPassword": "{connection_password}",
                "sfDatabase": "{connection_database}",
                "sfSchema": "{collection}",
                "sfWarehouse": "{connection_warehouse}",
                "dbtable": "{dataset}"
            }}'''
get_conf()[source]
spark_options_conf()[source]
write(df)[source]
write_stream()[source]
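
SNOWFLAKE_WRITE_OPTIONS is a .format()-style template (the doubled braces escape to literal braces), and its keys match the Spark Snowflake connector's option names. A minimal sketch of a batch write using the same keys, with every value a placeholder:

   from pyspark.sql import SparkSession

   spark = SparkSession.builder.getOrCreate()
   df = spark.createDataFrame([(1, "a")], ["id", "val"])

   # Option keys taken from SNOWFLAKE_WRITE_OPTIONS; all values are placeholders.
   sf_options = {
       "sfURL": "myaccount.snowflakecomputing.com",
       "sfUser": "user",
       "sfPassword": "password",
       "sfDatabase": "MY_DB",
       "sfSchema": "PUBLIC",
       "sfWarehouse": "MY_WH",
       "dbtable": "EXAMPLE",
   }
   df.write.format("net.snowflake.spark.snowflake").options(**sf_options).mode("append").save()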

pyflare.sdk.writers.writer module

class pyflare.sdk.writers.writer.Writer(write_config: WriteConfig)[source]

Bases: object

abstract get_conf()[source]
property spark: SparkSession
property view_name
abstract write(df)[source]
property write_config: WriteConfig
abstract write_stream()[source]
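
Writer is the abstract base class every output writer extends; subclasses must implement get_conf(), write(df), and write_stream(). A minimal sketch of a custom subclass (the console sink and the empty-list return of get_conf() are assumptions made for illustration, not SDK behavior):

   from pyflare.sdk.writers.writer import Writer

   class ConsoleOutputWriter(Writer):
       """Hypothetical writer that prints batches; illustration only."""

       def get_conf(self):
           # Assumption: no extra Spark conf is needed for this sink.
           return []

       def write(self, df):
           # Batch path: dump the DataFrame to stdout.
           df.show(truncate=False)

       def write_stream(self):
           raise NotImplementedError("streaming is out of scope for this sketch")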

Module contents