io.smartdatalake.workflow.dataobject
SnowflakeTableDataObject
Companion object SnowflakeTableDataObject
case class SnowflakeTableDataObject(id: DataObjectId, table: Table, schemaMin: Option[GenericSchema] = None, constraints: Seq[Constraint] = Seq(), expectations: Seq[Expectation] = Seq(), saveMode: SDLSaveMode = SDLSaveMode.Overwrite, connectionId: ConnectionId, comment: Option[String] = None, metadata: Option[DataObjectMetadata] = None)(implicit instanceRegistry: InstanceRegistry) extends TransactionalTableDataObject with ExpectationValidation with Product with Serializable
DataObject of type SnowflakeTableDataObject. Provides details to access Snowflake tables via an action. It can be used both for interacting with Snowflake through Spark with JDBC and for actions written in the Snowpark API that run directly on Snowflake (see the configuration sketch below).
- id
unique name of this data object
- table
Snowflake table to be written by this output
- constraints
List of row-level Constraints to enforce when writing to this data object.
- expectations
List of Expectations to enforce when writing to this data object. Expectations are checks based on aggregates over all rows of a dataset.
- saveMode
SDLSaveMode to use when writing to the table, default is "overwrite"
- connectionId
The SnowflakeTableConnection to use for the table
- comment
An optional comment to add to the table after writing a DataFrame to it
- metadata
metadata of this data object
- Annotations
- @Scaladoc()
Inheritance
- SnowflakeTableDataObject
- Serializable
- Serializable
- Product
- Equals
- ExpectationValidation
- TransactionalTableDataObject
- CanWriteSparkDataFrame
- CanWriteDataFrame
- CanCreateSparkDataFrame
- TableDataObject
- SchemaValidation
- CanCreateDataFrame
- DataObject
- AtlasExportable
- SmartDataLakeLogger
- ParsableFromConfig
- SdlConfigObject
- ConfigHolder
- AnyRef
- Any
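As a rough illustration of the constructor parameters above, the following sketch registers a SnowflakeTableDataObject programmatically. It is not taken from the library's documentation: the connection id, database and table names are made up, and the import paths are assumed and may differ between SDLB versions; in practice the object is usually declared in the HOCON configuration instead.

```scala
import io.smartdatalake.config.InstanceRegistry
import io.smartdatalake.config.SdlConfigObject.{ConnectionId, DataObjectId}
import io.smartdatalake.definitions.SDLSaveMode
import io.smartdatalake.workflow.dataobject.{SnowflakeTableDataObject, Table}

// Sketch only: names and import paths are assumptions, not verified against a specific SDLB release.
implicit val instanceRegistry: InstanceRegistry = new InstanceRegistry()

val ordersTable = SnowflakeTableDataObject(
  id = DataObjectId("btl-orders"),                    // hypothetical data object id
  table = Table(db = Some("MY_DB"), name = "ORDERS"), // hypothetical Snowflake database and table
  connectionId = ConnectionId("sf-con"),              // id of a SnowflakeTableConnection defined elsewhere
  saveMode = SDLSaveMode.Overwrite,
  comment = Some("Orders table written by SDLB")
)
instanceRegistry.register(ordersTable)
```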
Instance Constructors
- new SnowflakeTableDataObject(id: DataObjectId, table: Table, schemaMin: Option[GenericSchema] = None, constraints: Seq[Constraint] = Seq(), expectations: Seq[Expectation] = Seq(), saveMode: SDLSaveMode = SDLSaveMode.Overwrite, connectionId: ConnectionId, comment: Option[String] = None, metadata: Option[DataObjectMetadata] = None)(implicit instanceRegistry: InstanceRegistry)
- id
unique name of this data object
- table
Snowflake table to be written by this output
- constraints
List of row-level Constraints to enforce when writing to this data object.
- expectations
List of Expectations to enforce when writing to this data object. Expectations are checks based on aggregates over all rows of a dataset.
- saveMode
SDLSaveMode to use when writing to the table, default is "overwrite"
- connectionId
The SnowflakeTableConnection to use for the table
- comment
An optional comment to add to the table after writing a DataFrame to it
- metadata
metadata of this data object
Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##(): Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- def addFieldIfNotExisting(writeSchema: GenericSchema, colName: String, dataType: GenericDataType): GenericSchema
- Attributes
- protected
- Definition Classes
- CanCreateDataFrame
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def atlasName: String
- Definition Classes
- TableDataObject → DataObject → AtlasExportable
- def atlasQualifiedName(prefix: String): String
- Definition Classes
- TableDataObject → AtlasExportable
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
- val comment: Option[String]
- val connectionId: ConnectionId
- val constraints: Seq[Constraint]
- Definition Classes
- SnowflakeTableDataObject → ExpectationValidation
- def createReadSchema(writeSchema: GenericSchema)(implicit context: ActionPipelineContext): GenericSchema
- Definition Classes
- CanCreateDataFrame
- Annotations
- @Scaladoc()
- def dropTable(implicit context: ActionPipelineContext): Unit
- Definition Classes
- SnowflakeTableDataObject → TableDataObject
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- val expectations: Seq[Expectation]
- Definition Classes
- SnowflakeTableDataObject → ExpectationValidation
- def factory: FromConfigFactory[DataObject]
- Definition Classes
- SnowflakeTableDataObject → ParsableFromConfig
- def forceGenericObservation: Boolean
- Attributes
- protected
- Definition Classes
- ExpectationValidation
- val fullyQualifiedTableName: String
- final def getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def getConnection[T <: Connection](connectionId: ConnectionId)(implicit registry: InstanceRegistry, ct: ClassTag[T], tt: scala.reflect.api.JavaUniverse.TypeTag[T]): T
- Attributes
- protected
- Definition Classes
- DataObject
- Annotations
- @Scaladoc()
- def getConnectionReg[T <: Connection](connectionId: ConnectionId, registry: InstanceRegistry)(implicit ct: ClassTag[T], tt: scala.reflect.api.JavaUniverse.TypeTag[T]): T
- Attributes
- protected
- Definition Classes
- DataObject
- def getDataFrame(partitionValues: Seq[PartitionValues] = Seq(), subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- SnowflakeTableDataObject → CanCreateSparkDataFrame → CanCreateDataFrame
- def getPKduplicates(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- TableDataObject
- def getPKnulls(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- TableDataObject
- def getPKviolators(subFeedType: scala.reflect.api.JavaUniverse.Type)(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- TableDataObject
- def getSnowparkDataFrame(partitionValues: Seq[PartitionValues] = Seq())(implicit context: ActionPipelineContext): DataFrame
Read the contents of the table as a Snowpark DataFrame (see the usage sketch below).
- Annotations
- @Scaladoc()
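A minimal, hypothetical usage sketch for getSnowparkDataFrame; the ActionPipelineContext is normally supplied by the SDLB framework at runtime, and the helper function below is not part of the library:

```scala
import com.snowflake.snowpark.DataFrame
import io.smartdatalake.workflow.ActionPipelineContext

// Sketch: read the table content as a Snowpark DataFrame, evaluated directly on Snowflake.
def readOrders(ordersTable: SnowflakeTableDataObject)(implicit context: ActionPipelineContext): DataFrame = {
  val df = ordersTable.getSnowparkDataFrame() // no partition filter, reads the whole table
  df.show()                                   // print a small sample for inspection
  df
}
```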
- def getSparkDataFrame(partitionValues: Seq[PartitionValues] = Seq())(implicit context: ActionPipelineContext): DataFrame
- Definition Classes
- SnowflakeTableDataObject → CanCreateSparkDataFrame
- def housekeepingMode: Option[HousekeepingMode]
- Definition Classes
- DataObject
- Annotations
- @Scaladoc()
- val id: DataObjectId
- Definition Classes
- SnowflakeTableDataObject → DataObject → SdlConfigObject
- def init(df: GenericDataFrame, partitionValues: Seq[PartitionValues], saveModeOptions: Option[SaveModeOptions] = None)(implicit context: ActionPipelineContext): Unit
- Definition Classes
- SnowflakeTableDataObject → CanWriteSparkDataFrame → CanWriteDataFrame
- def initSparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
- Definition Classes
- CanWriteSparkDataFrame
- implicit val instanceRegistry: InstanceRegistry
- def isDbExisting(implicit context: ActionPipelineContext): Boolean
- Definition Classes
- SnowflakeTableDataObject → TableDataObject
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- def isTableExisting(implicit context: ActionPipelineContext): Boolean
- Definition Classes
- SnowflakeTableDataObject → TableDataObject
- lazy val logger: Logger
- Attributes
- protected
- Definition Classes
- SmartDataLakeLogger
- Annotations
- @transient()
- val metadata: Option[DataObjectMetadata]
- Definition Classes
- SnowflakeTableDataObject → DataObject
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
- def options: Map[String, String]
- Definition Classes
- TransactionalTableDataObject → CanWriteSparkDataFrame → CanCreateSparkDataFrame
- val saveMode: SDLSaveMode
- val schemaMin: Option[GenericSchema]
- Definition Classes
- SnowflakeTableDataObject → SchemaValidation
- def setupConstraintsAndJobExpectations(df: GenericDataFrame)(implicit context: ActionPipelineContext): (GenericDataFrame, DataFrameObservation)
- Definition Classes
- ExpectationValidation
- def snowparkSession: Session
- def streamingOptions: Map[String, String]
- Definition Classes
- CanWriteDataFrame
- final def synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
- var table: Table
- Definition Classes
- SnowflakeTableDataObject → TableDataObject
- def toStringShort: String
- Definition Classes
- DataObject
- def validateExpectations(dfJob: GenericDataFrame, dfAll: GenericDataFrame, partitionValues: Seq[PartitionValues], scopeJobMetrics: Map[String, _])(implicit context: ActionPipelineContext): Map[String, _]
- Definition Classes
- ExpectationValidation
- def validateSchema(schema: GenericSchema, schemaExpected: GenericSchema, role: String): Unit
- Definition Classes
- SchemaValidation
- Annotations
- @Scaladoc()
- def validateSchemaMin(schema: GenericSchema, role: String): Unit
- Definition Classes
- SchemaValidation
- Annotations
- @Scaladoc()
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
- def writeDataFrame(df: GenericDataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
- Definition Classes
- SnowflakeTableDataObject → CanWriteSparkDataFrame → CanWriteDataFrame
- def writeSnowparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean = false, saveModeOptions: Option[SaveModeOptions] = None)(implicit context: ActionPipelineContext): Unit
Write a Snowpark DataFrame to Snowflake; used in Snowpark actions (see the usage sketch below).
- Annotations
- @Scaladoc()
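Correspondingly, a hypothetical sketch for writeSnowparkDataFrame, roughly what a Snowpark-based action would do after transforming a DataFrame; the STATUS column and the helper function are made up:

```scala
import com.snowflake.snowpark.DataFrame
import com.snowflake.snowpark.functions.{col, lit}
import io.smartdatalake.workflow.ActionPipelineContext

// Sketch: filter with the Snowpark API and write the result back to the Snowflake table.
def writeOpenOrders(df: DataFrame, ordersTable: SnowflakeTableDataObject)
                   (implicit context: ActionPipelineContext): Unit = {
  val openOrders = df.filter(col("STATUS") === lit("OPEN")) // pushed down to Snowflake
  ordersTable.writeSnowparkDataFrame(openOrders, partitionValues = Seq())
}
```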
- def writeSparkDataFrame(df: DataFrame, partitionValues: Seq[PartitionValues], isRecursiveInput: Boolean, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): Unit
- Definition Classes
- SnowflakeTableDataObject → CanWriteSparkDataFrame
- def writeStreamingDataFrame(df: GenericDataFrame, trigger: Trigger, options: Map[String, String], checkpointLocation: String, queryName: String, outputMode: OutputMode, saveModeOptions: Option[SaveModeOptions])(implicit context: ActionPipelineContext): StreamingQuery
- Definition Classes
- CanWriteSparkDataFrame → CanWriteDataFrame
Deprecated Value Members
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated
- Deprecated