public static class EncoderHelpers.EncodeUsingBeamCoder<T>
extends org.apache.spark.sql.catalyst.expressions.UnaryExpression
implements org.apache.spark.sql.catalyst.expressions.NonSQLExpression, java.io.Serializable
| Constructor and Description |
|---|
| EncodeUsingBeamCoder(org.apache.spark.sql.catalyst.expressions.Expression child, Coder<T> coder) |
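For illustration, a minimal sketch of constructing this expression over a bound input slot. The BoundReference ordinal, the ObjectType, and the choice of StringUtf8Coder are assumptions made for the example, not requirements of this class.

```java
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.spark.sql.catalyst.expressions.BoundReference;
import org.apache.spark.sql.types.ObjectType;
// EncoderHelpers is the enclosing class documented on this page; its import is omitted here.

class EncodeUsingBeamCoderExample {
  static EncoderHelpers.EncodeUsingBeamCoder<String> stringEncodeExpr() {
    // Hypothetical child expression: reads the value to encode from ordinal 0
    // of the input row, typed as a plain Java String object.
    BoundReference input = new BoundReference(0, new ObjectType(String.class), true);

    // Wrap the child so evaluation serializes its value to byte[] with a Beam coder.
    return new EncoderHelpers.EncodeUsingBeamCoder<>(input, StringUtf8Coder.of());
  }
}
```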
| Modifier and Type | Method and Description |
|---|---|
| boolean | canEqual(java.lang.Object that) |
| org.apache.spark.sql.catalyst.expressions.Expression | child() |
| org.apache.spark.sql.types.DataType | dataType() |
| org.apache.spark.sql.catalyst.expressions.codegen.ExprCode | doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx, org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev) |
| static <T> byte[] | encode(boolean isNull, T value, Coder<T> coder) Convert value to byte array (invoked by generated code in doGenCode(CodegenContext, ExprCode)). |
| boolean | equals(@Nullable java.lang.Object o) |
| int | hashCode() |
| int | productArity() |
| java.lang.Object | productElement(int n) |
Methods inherited from class org.apache.spark.sql.catalyst.expressions.UnaryExpression:
children, defineCodeGen, eval, foldable, nullable, nullSafeCodeGen, nullSafeEval

Methods inherited from class org.apache.spark.sql.catalyst.expressions.Expression:
canonicalized, checkInputDataTypes, childrenResolved, deterministic, eval$default$1, flatArguments, genCode, org$apache$spark$sql$catalyst$expressions$Expression$$reduceCodeSize, prettyName, references, resolved, semanticEquals, semanticHash, simpleString, sql, toString, verboseString

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, innerChildren, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, org$apache$spark$sql$catalyst$trees$TreeNode$$simpleClassName, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, transform, transformDown, transformUp, treeString, treeString, treeString$default$2, verboseStringWithSuffix, withNewChildren
public org.apache.spark.sql.catalyst.expressions.Expression child()
Specified by: child in class org.apache.spark.sql.catalyst.expressions.UnaryExpression
public org.apache.spark.sql.catalyst.expressions.codegen.ExprCode doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx, org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev)
Specified by: doGenCode in class org.apache.spark.sql.catalyst.expressions.Expression
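As a rough stand-in for the Java fragment this method emits, the sketch below shows the effective shape of the generated evaluation: the child expression's null flag and value are handed to the static encode helper together with the Beam coder. The method and parameter names here are illustrative only, not the literal generated code.

```java
import org.apache.beam.sdk.coders.Coder;
// EncoderHelpers is the enclosing class documented on this page; its import is omitted here.

class GeneratedCodeShape {
  // Hand-written equivalent of the emitted fragment: evaluate the child, then
  // delegate serialization to the static encode helper with the configured coder.
  static <T> byte[] evaluate(boolean childIsNull, T childValue, Coder<T> coder) {
    return EncoderHelpers.EncodeUsingBeamCoder.encode(childIsNull, childValue, coder);
  }
}
```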
public org.apache.spark.sql.types.DataType dataType()
Specified by: dataType in class org.apache.spark.sql.catalyst.expressions.Expression
public java.lang.Object productElement(int n)
Specified by: productElement in interface scala.Product
public int productArity()
Specified by: productArity in interface scala.Product
public boolean canEqual(java.lang.Object that)
Specified by: canEqual in interface scala.Equals
public boolean equals(@Nullable java.lang.Object o)
Specified by: equals in interface scala.Equals
Overrides: equals in class java.lang.Object
public int hashCode()
Overrides: hashCode in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.expressions.Expression>
public static <T> byte[] encode(boolean isNull, T value, Coder<T> coder)
Convert value to byte array (invoked by generated code in doGenCode(CodegenContext, ExprCode)).
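As a quick illustration of the helper outside of generated code, a hedged example that encodes a non-null String with StringUtf8Coder; the coder choice and the surrounding main method are assumptions for the example, not part of the API.

```java
import org.apache.beam.sdk.coders.StringUtf8Coder;
// EncoderHelpers is the enclosing class documented on this page; its import is omitted here.

class EncodeExample {
  public static void main(String[] args) {
    // Encode a non-null value: the coder's serialized form is returned as byte[].
    // In generated code the first argument mirrors the child expression's isNull flag.
    byte[] bytes =
        EncoderHelpers.EncodeUsingBeamCoder.encode(false, "hello", StringUtf8Coder.of());
    System.out.println(bytes.length);
  }
}
```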