public static class EncoderHelpers.DecodeUsingBeamCoder<T>
extends org.apache.spark.sql.catalyst.expressions.UnaryExpression
implements org.apache.spark.sql.catalyst.expressions.NonSQLExpression, java.io.Serializable
Catalyst Expression that decodes elements using a Beam Coder.

| Constructor and Description |
|---|
| DecodeUsingBeamCoder(org.apache.spark.sql.catalyst.expressions.Expression child, scala.reflect.ClassTag<T> classTag, Coder<T> beamCoder) |
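Below is a minimal construction sketch, not part of the published Javadoc: it assumes EncoderHelpers (the enclosing class) is imported from its runner package, that the child expression is a Spark BoundReference over a binary column holding the coder's serialized bytes, and it uses Beam's StringUtf8Coder purely for illustration.

```java
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.spark.sql.catalyst.expressions.BoundReference;
import org.apache.spark.sql.types.DataTypes;
import scala.reflect.ClassTag;

public class DecodeExpressionSketch {
  // Hypothetical helper: builds a DecodeUsingBeamCoder<String> over input column 0.
  public static EncoderHelpers.DecodeUsingBeamCoder<String> stringDecoder() {
    // Child expression: the binary column (ordinal 0) assumed to hold the coder's serialized bytes.
    BoundReference serializedBytes = new BoundReference(0, DataTypes.BinaryType, true);
    // ClassTag for the element type being decoded.
    ClassTag<String> tag = scala.reflect.ClassTag$.MODULE$.apply(String.class);
    // Decode those bytes back into a String with Beam's StringUtf8Coder.
    return new EncoderHelpers.DecodeUsingBeamCoder<>(serializedBytes, tag, StringUtf8Coder.of());
  }
}
```

The resulting expression can then be evaluated or code-generated by Catalyst like any other UnaryExpression.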
| Modifier and Type | Method and Description |
|---|---|
| boolean | canEqual(java.lang.Object that) |
| org.apache.spark.sql.catalyst.expressions.Expression | child() |
| org.apache.spark.sql.types.DataType | dataType() |
| org.apache.spark.sql.catalyst.expressions.codegen.ExprCode | doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx, org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev) |
| boolean | equals(java.lang.Object o) |
| int | hashCode() |
| java.lang.Object | nullSafeEval(java.lang.Object input) |
| int | productArity() |
| java.lang.Object | productElement(int n) |
Methods inherited from class org.apache.spark.sql.catalyst.expressions.UnaryExpression:
children, defineCodeGen, eval, foldable, nullable, nullSafeCodeGen

Methods inherited from class org.apache.spark.sql.catalyst.expressions.Expression:
canonicalized, checkInputDataTypes, childrenResolved, deterministic, eval$default$1, flatArguments, genCode, org$apache$spark$sql$catalyst$expressions$Expression$$reduceCodeSize, prettyName, references, resolved, semanticEquals, semanticHash, simpleString, sql, toString, verboseString

Methods inherited from class org.apache.spark.sql.catalyst.trees.TreeNode:
apply, argString, asCode, collect, collectFirst, collectLeaves, containsChild, fastEquals, find, flatMap, foreach, foreachUp, generateTreeString, generateTreeString$default$5, generateTreeString$default$6, innerChildren, jsonFields, makeCopy, map, mapChildren, mapProductIterator, nodeName, numberedTreeString, org$apache$spark$sql$catalyst$trees$TreeNode$$allChildren, org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1, org$apache$spark$sql$catalyst$trees$TreeNode$$getNodeNumbered, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$1, org$apache$spark$sql$catalyst$trees$TreeNode$$mapChild$2, org$apache$spark$sql$catalyst$trees$TreeNode$$mapTreeNode$1, org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson, origin, otherCopyArgs, p, prettyJson, productIterator, productPrefix, stringArgs, toJSON, transform, transformDown, transformUp, treeString, treeString, treeString$default$2, verboseStringWithSuffix, withNewChildren

public org.apache.spark.sql.catalyst.expressions.Expression child()
  child in class org.apache.spark.sql.catalyst.expressions.UnaryExpression

public org.apache.spark.sql.catalyst.expressions.codegen.ExprCode doGenCode(org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext ctx, org.apache.spark.sql.catalyst.expressions.codegen.ExprCode ev)
  doGenCode in class org.apache.spark.sql.catalyst.expressions.Expression

public java.lang.Object nullSafeEval(java.lang.Object input)
  nullSafeEval in class org.apache.spark.sql.catalyst.expressions.UnaryExpression

public org.apache.spark.sql.types.DataType dataType()
  dataType in class org.apache.spark.sql.catalyst.expressions.Expression

public java.lang.Object productElement(int n)
  productElement in interface scala.Product

public int productArity()
  productArity in interface scala.Product

public boolean canEqual(java.lang.Object that)
  canEqual in interface scala.Equals

public boolean equals(java.lang.Object o)
  equals in interface scala.Equals
  equals in class java.lang.Object

public int hashCode()
  hashCode in class org.apache.spark.sql.catalyst.trees.TreeNode<org.apache.spark.sql.catalyst.expressions.Expression>
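As a rough orientation for nullSafeEval(java.lang.Object input), and not the class's actual implementation: decoding an element with a Beam Coder from an array of serialized bytes amounts to something like the helper below, applied with the configured beamCoder.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.StringUtf8Coder;

public class CoderDecodeSketch {
  // Illustrative helper (hypothetical name): decode a value of type T from coder-produced bytes.
  static <T> T decodeFromBytes(byte[] bytes, Coder<T> coder) throws IOException {
    return coder.decode(new ByteArrayInputStream(bytes));
  }

  public static void main(String[] args) throws IOException {
    // Round-trip a String through StringUtf8Coder to show the encode/decode symmetry
    // that a decode expression of this kind relies on.
    Coder<String> coder = StringUtf8Coder.of();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    coder.encode("hello", out);
    System.out.println(decodeFromBytes(out.toByteArray(), coder)); // prints "hello"
  }
}
```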