io.smartdatalake.workflow.action.snowflake.transformer
ScalaClassSnowparkDfTransformer
Companion object ScalaClassSnowparkDfTransformer
case class ScalaClassSnowparkDfTransformer(name: String = "scalaSparkTransform", description: Option[String] = None, className: String, options: Map[String, String] = Map(), runtimeOptions: Map[String, String] = Map()) extends OptionsGenericDfTransformer with Product with Serializable
Configuration of a custom Snowpark-DataFrame transformation between one input and one output (1:1) as a Java/Scala class. Define a transform function which receives a DataObjectId, a DataFrame, and a map of options, and returns a DataFrame. The Java/Scala class must implement the interface CustomSnowparkDfTransformer.
- name
name of the transformer
- description
Optional description of the transformer
- className
class name implementing trait CustomSnowparkDfTransformer
- options
Options to pass to the transformation
- runtimeOptions
Optional tuples of [key, Spark SQL expression] to be added as additional options when executing the transformation. The Spark SQL expressions are evaluated against an instance of DefaultExpressionData.
- Annotations
- @Scaladoc()
- Alphabetic
- By Inheritance
- ScalaClassSnowparkDfTransformer
- Serializable
- Serializable
- Product
- Equals
- OptionsGenericDfTransformer
- GenericDfTransformer
- ConfigHolder
- ParsableFromConfig
- GenericDfTransformerDef
- PartitionValueTransformer
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
ScalaClassSnowparkDfTransformer(name: String = "scalaSparkTransform", description: Option[String] = None, className: String, options: Map[String, String] = Map(), runtimeOptions: Map[String, String] = Map())
- name
name of the transformer
- description
Optional description of the transformer
- className
class name implementing trait CustomSnowparkDfTransformer
- options
Options to pass to the transformation
- runtimeOptions
Optional tuples of [key, Spark SQL expression] to be added as additional options when executing the transformation. The Spark SQL expressions are evaluated against an instance of DefaultExpressionData.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- val className: String
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
val
description: Option[String]
- Definition Classes
- ScalaClassSnowparkDfTransformer → GenericDfTransformerDef
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
factory: FromConfigFactory[GenericDfTransformer]
- Definition Classes
- ScalaClassSnowparkDfTransformer → ParsableFromConfig
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
getSubFeedSupportedType: scala.reflect.api.JavaUniverse.Type
- Definition Classes
- ScalaClassSnowparkDfTransformer → GenericDfTransformerDef
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
val
name: String
- Definition Classes
- ScalaClassSnowparkDfTransformer → GenericDfTransformerDef
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
val
options: Map[String, String]
- Definition Classes
- ScalaClassSnowparkDfTransformer → OptionsGenericDfTransformer
-
def
prepare(actionId: ActionId)(implicit context: ActionPipelineContext): Unit
- Definition Classes
- GenericDfTransformerDef
- Annotations
- @Scaladoc()
-
val
runtimeOptions: Map[String, String]
- Definition Classes
- ScalaClassSnowparkDfTransformer → OptionsGenericDfTransformer
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
final
def
transform(actionId: ActionId, partitionValues: Seq[PartitionValues], df: GenericDataFrame, dataObjectId: DataObjectId, previousTransformerName: Option[String], executionModeResultOptions: Map[String, String])(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- OptionsGenericDfTransformer → GenericDfTransformerDef
-
final
def
transformPartitionValues(actionId: ActionId, partitionValues: Seq[PartitionValues], executionModeResultOptions: Map[String, String])(implicit context: ActionPipelineContext): Option[Map[PartitionValues, PartitionValues]]
- Definition Classes
- OptionsGenericDfTransformer → PartitionValueTransformer
-
def
transformPartitionValuesWithOptions(actionId: ActionId, partitionValues: Seq[PartitionValues], options: Map[String, String])(implicit context: ActionPipelineContext): Option[Map[PartitionValues, PartitionValues]]
- Definition Classes
- ScalaClassSnowparkDfTransformer → OptionsGenericDfTransformer
-
def
transformWithOptions(actionId: ActionId, partitionValues: Seq[PartitionValues], df: GenericDataFrame, dataObjectId: DataObjectId, options: Map[String, String])(implicit context: ActionPipelineContext): GenericDataFrame
- Definition Classes
- ScalaClassSnowparkDfTransformer → OptionsGenericDfTransformer
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated
- Deprecated