io.smartdatalake.workflow.action.snowflake.transformer
ScalaClassSnowparkDfsTransformer
Companion object ScalaClassSnowparkDfsTransformer
case class ScalaClassSnowparkDfsTransformer(name: String = "snowparkScalaTransform", description: Option[String] = None, className: String, options: Map[String, String] = Map(), runtimeOptions: Map[String, String] = Map()) extends OptionsGenericDfsTransformer with Product with Serializable
Configuration of a custom Snowpark-DataFrame transformation between many inputs and many outputs (n:m). Define a transform function which receives a map of input DataObjectIds with DataFrames and a map of options, and has to return a map of output DataObjectIds with DataFrames; see also trait CustomSnowparkDfsTransformer.
- name
name of the transformer
- description
Optional description of the transformer
- className
class name implementing trait CustomSnowparkDfsTransformer
- options
Options to pass to the transformation
- runtimeOptions
Optional tuples of [key, Spark SQL expression] to be added as additional options when executing the transformation. The Spark SQL expressions are evaluated against an instance of DefaultExpressionData.
- Annotations
- @Scaladoc()
- Alphabetic
- By Inheritance
- ScalaClassSnowparkDfsTransformer
- Serializable
- Serializable
- Product
- Equals
- OptionsGenericDfsTransformer
- GenericDfsTransformer
- ConfigHolder
- ParsableFromConfig
- GenericDfsTransformerDef
- PartitionValueTransformer
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Instance Constructors
-
new
ScalaClassSnowparkDfsTransformer(name: String = "snowparkScalaTransform", description: Option[String] = None, className: String, options: Map[String, String] = Map(), runtimeOptions: Map[String, String] = Map())
- name
name of the transformer
- description
Optional description of the transformer
- className
class name implementing trait CustomSnowparkDfsTransformer
- options
Options to pass to the transformation
- runtimeOptions
Optional tuples of [key, Spark SQL expression] to be added as additional options when executing the transformation. The Spark SQL expressions are evaluated against an instance of DefaultExpressionData.
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
- val className: String
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native() @HotSpotIntrinsicCandidate()
-
val
description: Option[String]
- Definition Classes
- ScalaClassSnowparkDfsTransformer → GenericDfsTransformerDef
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
factory: FromConfigFactory[GenericDfsTransformer]
- Definition Classes
- ScalaClassSnowparkDfsTransformer → ParsableFromConfig
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
def
getSubFeedSupportedType: scala.reflect.api.JavaUniverse.Type
- Definition Classes
- ScalaClassSnowparkDfsTransformer → GenericDfsTransformerDef
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
val
name: String
- Definition Classes
- ScalaClassSnowparkDfsTransformer → GenericDfsTransformerDef
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native() @HotSpotIntrinsicCandidate()
-
val
options: Map[String, String]
- Definition Classes
- ScalaClassSnowparkDfsTransformer → OptionsGenericDfsTransformer
-
def
prepare(actionId: ActionId)(implicit context: ActionPipelineContext): Unit
- Definition Classes
- GenericDfsTransformerDef
- Annotations
- @Scaladoc()
-
val
runtimeOptions: Map[String, String]
- Definition Classes
- ScalaClassSnowparkDfsTransformer → OptionsGenericDfsTransformer
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
transform(actionId: ActionId, partitionValues: Seq[PartitionValues], dfs: Map[String, GenericDataFrame], executionModeResultOptions: Map[String, String])(implicit context: ActionPipelineContext): Map[String, GenericDataFrame]
- Definition Classes
- OptionsGenericDfsTransformer → GenericDfsTransformerDef
-
def
transformPartitionValues(actionId: ActionId, partitionValues: Seq[PartitionValues], executionModeResultOptions: Map[String, String])(implicit context: ActionPipelineContext): Option[Map[PartitionValues, PartitionValues]]
- Definition Classes
- OptionsGenericDfsTransformer → PartitionValueTransformer
-
def
transformPartitionValuesWithOptions(actionId: ActionId, partitionValues: Seq[PartitionValues], options: Map[String, String])(implicit context: ActionPipelineContext): Option[Map[PartitionValues, PartitionValues]]
- Definition Classes
- OptionsGenericDfsTransformer
- Annotations
- @Scaladoc()
-
def
transformWithOptions(actionId: ActionId, partitionValues: Seq[PartitionValues], dfs: Map[String, GenericDataFrame], options: Map[String, String])(implicit context: ActionPipelineContext): Map[String, GenericDataFrame]
- Definition Classes
- ScalaClassSnowparkDfsTransformer → OptionsGenericDfsTransformer
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
Deprecated Value Members
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] ) @Deprecated
- Deprecated