SnowflakeTableDataObject

case class SnowflakeTableDataObject(id: DataObjectId, table: Table, schemaMin: Option[GenericSchema] = None, constraints: Seq[Constraint] = Seq(), expectations: Seq[Expectation] = Seq(), saveMode: SDLSaveMode = SDLSaveMode.Overwrite, connectionId: ConnectionId, comment: Option[String] = None, metadata: Option[DataObjectMetadata] = None)(implicit instanceRegistry: InstanceRegistry) extends TransactionalTableDataObject with ExpectationValidation with Product with Serializable

DataObject of type SnowflakeTableDataObject. Provides the details needed to access Snowflake tables from an action. Can be used both for interacting with Snowflake through Spark with JDBC, and for actions written in the Snowpark API that run directly on Snowflake.

id

unique name of this data object

table

Snowflake table to be written by this output

constraints

List of row-level Constraints to enforce when writing to this data object.

expectations

List of Expectations to enforce when writing to this data object. Expectations are checks based on aggregates over all rows of a dataset.

saveMode

Spark SDLSaveMode to use when writing to the table; default is Overwrite

connectionId

The SnowflakeTableConnection to use for the table

comment

An optional comment to add to the table after writing a DataFrame to it

metadata

Metadata describing this DataObject
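
A minimal construction sketch in Scala, assembled from the signature above. The connection id, database and table names are placeholders; the import paths, the InstanceRegistry constructor, and the Table field names (db, name) are assumptions based on typical SDLB usage, not confirmed by this page.

    import io.smartdatalake.config.InstanceRegistry
    import io.smartdatalake.config.SdlConfigObject.{ConnectionId, DataObjectId}
    import io.smartdatalake.definitions.SDLSaveMode
    import io.smartdatalake.workflow.dataobject.{SnowflakeTableDataObject, Table}

    // Registry normally populated from the SDLB configuration; a registered
    // SnowflakeTableConnection with id "sfCon" is assumed to exist.
    implicit val registry: InstanceRegistry = new InstanceRegistry()

    val sfSales = SnowflakeTableDataObject(
      id = DataObjectId("sfSalesTable"),
      table = Table(db = Some("ANALYTICS"), name = "SALES"), // field names assumed
      connectionId = ConnectionId("sfCon"),
      saveMode = SDLSaveMode.Append,
      comment = Some("daily sales, loaded by SDLB")
    )

Parameters not set here (schemaMin, constraints, expectations, metadata) keep the defaults shown in the signature.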

Annotations
@Scaladoc()
Linear Supertypes
Serializable, Serializable, Product, Equals, ExpectationValidation, TransactionalTableDataObject, CanWriteSparkDataFrame, CanWriteDataFrame, CanCreateSparkDataFrame, TableDataObject, SchemaValidation, CanCreateDataFrame, DataObject, AtlasExportable, SmartDataLakeLogger, ParsableFromConfig[DataObject], SdlConfigObject, ConfigHolder, AnyRef, Any

Instance Constructors

  1. new SnowflakeTableDataObject(id: DataObjectId, table: Table, schemaMin: Option[GenericSchema] = None, constraints: Seq[Constraint] = Seq(), expectations: Seq[Expectation] = Seq(), saveMode: SDLSaveMode = SDLSaveMode.Overwrite, connectionId: ConnectionId, comment: Option[String] = None, metadata: Option[DataObjectMetadata] = None)(implicit instanceRegistry: InstanceRegistry)


Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def addFieldIfNotExisting(writeSchema: GenericSchema, colName: String, dataType: GenericDataType): GenericSchema
    Attributes
    protected
    Definition Classes
    CanCreateDataFrame
  5. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  6. def atlasName: String
    Definition Classes
    TableDataObject → DataObject → AtlasExportable
  7. def atlasQualifiedName(prefix: String): String
    Definition Classes
    TableDataObject → AtlasExportable
  8. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native() @HotSpotIntrinsicCandidate()
  9. val comment: Option[String]
  10. val connectionId: ConnectionId
  11. val constraints: Seq[Constraint]
    Definition Classes
    SnowflakeTableDataObject → ExpectationValidation
  12. def createReadSchema(writeSchema: GenericSchema)(implicit context: ActionPipelineContext): GenericSchema
    Definition Classes
    CanCreateDataFrame
    Annotations
    @Scaladoc()
  13. def dropTable(implicit context: ActionPipelineContext): Unit
    Definition Classes
    SnowflakeTableDataObject → TableDataObject
  14. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  15. val expectations: Seq[Expectation]
    Definition Classes
    SnowflakeTableDataObject → ExpectationValidation
  16. def factory: FromConfigFactory[DataObject]
    Definition Classes
    SnowflakeTableDataObject → ParsableFromConfig
  17. def forceGenericObservation: Boolean
    Attributes
    protected
    Definition Classes
    ExpectationValidation
  18. val fullyQualifiedTableName: String
  19. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  20. def getConnection[T <: Connection](connectionId: ConnectionId)(implicit registry: InstanceRegistry, ct: ClassTag[T], tt: scala.reflect.api.JavaUniverse.TypeTag[T]): T
    Attributes
    protected
    Definition Classes
    DataObject
    Annotations
    @Scaladoc()
  21. def getConnectionReg[T <: Connection](connectionId: ConnectionId, registry: InstanceRegistry)(implicit ct: ClassTag[T], tt: scala.reflect.api.JavaUniverse.TypeTag[T]): T
    Attributes
    protected
    Definition Classes
    DataObject
  22. def getDataFrame(partitionValues: Seq[PartitionValues] = Seq(), subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
    Definition Classes
    SnowflakeTableDataObject → CanCreateSparkDataFrame → CanCreateDataFrame
  23. def getPKduplicates(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
    Definition Classes
    TableDataObject
  24. def getPKnulls(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
    Definition Classes
    TableDataObject
  25. def getPKviolators(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
    Definition Classes
    TableDataObject
  26. def getSnowparkDataFrame(partitionValues: Seq[PartitionValues] = Seq())(implicit context: ActionPipelineContext): DataFrame

    Read the contents of a table as a Snowpark DataFrame (see the usage sketch below).

    Annotations
    @Scaladoc()
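
    A hypothetical usage sketch (not from this page): inside a custom Snowpark action the implicit ActionPipelineContext is provided by the framework; the column name AMOUNT and the import paths are assumptions.

      import com.snowflake.snowpark.functions.col
      import io.smartdatalake.workflow.ActionPipelineContext

      def showPositiveAmounts(sfSales: SnowflakeTableDataObject)
                             (implicit context: ActionPipelineContext): Unit = {
        val df = sfSales.getSnowparkDataFrame() // executes on Snowflake, no Spark involved
        df.filter(col("AMOUNT") > 0).show()     // standard Snowpark DataFrame API from here
      }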
  27. def getSparkDataFrame(partitionValues: Seq[PartitionValues] = Seq())(implicit context: ActionPipelineContext): DataFrame
    Definition Classes
    SnowflakeTableDataObject → CanCreateSparkDataFrame
  28. def housekeepingMode: Option[HousekeepingMode]
    Definition Classes
    DataObject
    Annotations
    @Scaladoc()
  29. val id: DataObjectId
    Definition Classes
    SnowflakeTableDataObject → DataObject → SdlConfigObject
  30. def init(df: GenericDataFrame, partitionValues: Seq[PartitionValues], saveModeOptions: Option[SaveModeOptions] = None)(implicit context: ActionPipelineContext): Unit
    Definition Classes
    SnowflakeTableDataObject → CanWriteSparkDataFrame → CanWriteDataFrame
  31. def initSparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
    Definition Classes
    CanWriteSparkDataFrame
  32. implicit val instanceRegistry: InstanceRegistry
  33. def isDbExisting(implicit context: ActionPipelineContext): Boolean
    Definition Classes
    SnowflakeTableDataObject → TableDataObject
  34. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  35. def isTableExisting(implicit context: ActionPipelineContext): Boolean
    Definition Classes
    SnowflakeTableDataObject → TableDataObject
  36. lazy val logger: Logger
    Attributes
    protected
    Definition Classes
    SmartDataLakeLogger
    Annotations
    @transient()
  37. val metadata: Option[DataObjectMetadata]
    Definition Classes
    SnowflakeTableDataObject → DataObject
  38. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  39. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  40. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native() @HotSpotIntrinsicCandidate()
  41. def options: Map[String, String]
    Definition Classes
    TransactionalTableDataObject → CanWriteSparkDataFrame → CanCreateSparkDataFrame
  42. val saveMode: SDLSaveMode
  43. val schemaMin: Option[GenericSchema]
    Definition Classes
    SnowflakeTableDataObject → SchemaValidation
  44. def setupConstraintsAndJobExpectations(df: GenericDataFrame)(implicit context: ActionPipelineContext): (GenericDataFrame, DataFrameObservation)
    Definition Classes
    ExpectationValidation
  45. def snowparkSession: Session
  46. def streamingOptions: Map[String, String]
    Definition Classes
    CanWriteDataFrame
  47. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  48. var table: Table
    Definition Classes
    SnowflakeTableDataObject → TableDataObject
  49. def toStringShort: String
    Definition Classes
    DataObject
  50. def validateExpectations(dfJob: GenericDataFrame, dfAll: GenericDataFrame, partitionValues: Seq[PartitionValues], scopeJobMetrics: Map[String, _])(implicit context: ActionPipelineContext): Map[String, _]
    Definition Classes
    ExpectationValidation
  51. def validateSchema(schema: GenericSchema, schemaExpected: GenericSchema, role: String): Unit
    Definition Classes
    SchemaValidation
    Annotations
    @Scaladoc()
  52. def validateSchemaMin(schema: GenericSchema, role: String): Unit
    Definition Classes
    SchemaValidation
    Annotations
    @Scaladoc()
  53. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  54. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  55. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  56. def writeDataFrame(df: GenericDataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
    Definition Classes
    SnowflakeTableDataObject → CanWriteSparkDataFrame → CanWriteDataFrame
  57. def writeSnowparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean = false, saveModeOptions: Option[SaveModeOptions] = None)(implicit context: ActionPipelineContext): Unit

    Write a Snowpark DataFrame to Snowflake, used in Snowpark actions (see the sketch below).

    Annotations
    @Scaladoc()
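
    A hypothetical write sketch (not from this page): isRecursiveInput and saveModeOptions keep their defaults, and partitionValues is passed empty because no partitioning is used.

      import io.smartdatalake.workflow.ActionPipelineContext

      def writeResult(sfSales: SnowflakeTableDataObject, df: com.snowflake.snowpark.DataFrame)
                     (implicit context: ActionPipelineContext): Unit = {
        sfSales.writeSnowparkDataFrame(df, partitionValues = Seq())
      }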
  58. def writeSparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
    Definition Classes
    SnowflakeTableDataObject → CanWriteSparkDataFrame
  59. def writeStreamingDataFrame(df: GenericDataFrame, trigger: Trigger, options: Map[String, String], checkpointLocation: String, queryName: String, outputMode: OutputMode, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): StreamingQuery
    Definition Classes
    CanWriteSparkDataFrame → CanWriteDataFrame

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] ) @Deprecated
