public static class EncoderHelpers.EncodeUsingBeamCoder<T>
extends org.apache.spark.sql.catalyst.expressions.UnaryExpression
implements org.apache.spark.sql.catalyst.expressions.NonSQLExpression, java.io.Serializable
Coder.

| Constructor and Description |
|---|
EncodeUsingBeamCoder(org.apache.spark.sql.catalyst.expressions.Expression child,
Coder<T> coder) |
| Modifier and Type | Method and Description |
|---|---|
boolean |
canEqual(java.lang.Object that) |
org.apache.spark.sql.catalyst.expressions.Expression |
child() |
org.apache.spark.sql.types.DataType |
dataType() |
org.apache.spark.sql.catalyst.expressions.codegen.ExprCode |
doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx,
org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev) |
static <T> byte[] |
encode(boolean isNull,
T value,
Coder<T> coder)
Convert value to byte array (invoked by generated code in
doGenCode(CodegenContext,
ExprCode)). |
boolean |
equals(@Nullable java.lang.Object o) |
int |
hashCode() |
int |
productArity() |
java.lang.Object |
productElement(int n) |
Methods inherited from class org.apache.spark.sql.catalyst.expressions.UnaryExpression: children, defineCodeGen, eval, foldable, nullable, nullSafeCodeGen, nullSafeEval

Methods inherited from class org.apache.spark.sql.catalyst.expressions.Expression: canonicalized, checkInputDataTypes, childrenResolved, deterministic, eval$default$1, flatArguments, genCode, org$apache$spark$sql$catalyst$expressions$Expression$$reduceCodeSize, prettyName, references, resolved, semanticEquals, semanticHash, simpleString, sql, toString, verboseString

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode: apply, argString, asCode, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, innerChildren, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, org$apache$spark$sql$catalyst$trees$TreeNode$$simpleClassName, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, transform, transformDown, transformUp, treeString, treeString, treeString$default$2, verboseStringWithSuffix, withNewChildren

public org.apache.spark.sql.catalyst.expressions.Expression child()
child in class org.apache.spark.sql.catalyst.expressions.UnaryExpression

public org.apache.spark.sql.catalyst.expressions.codegen.ExprCode doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx,
org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev)
doGenCode in class org.apache.spark.sql.catalyst.expressions.Expression

public org.apache.spark.sql.types.DataType dataType()
dataType in class org.apache.spark.sql.catalyst.expressions.Expression

public java.lang.Object productElement(int n)
productElement in interface scala.Product

public int productArity()
productArity in interface scala.Product

public boolean canEqual(java.lang.Object that)
canEqual in interface scala.Equals

public boolean equals(@Nullable java.lang.Object o)
equals in interface scala.Equals
equals in class java.lang.Object

public int hashCode()
hashCode in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.expressions.Expression>

public static <T> byte[] encode(boolean isNull,
                                T value,
                                Coder<T> coder)

Convert value to byte array (invoked by generated code in doGenCode(CodegenContext, ExprCode)).