DSL parsing in Spark Catalyst

The following example comes from the Spark source code:

import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.dsl.expressions._

// Standard operators are added to expressions
Literal(1) + Literal(1)

// A Symbol is converted into an unresolved attribute
'a.attr

'b.attr.toString()

// Comparison operators build predicate expressions
'a === 'b

import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.dsl.plans._

// Build a logical plan with the plans DSL, then resolve it with .analyze
LocalRelation('key.int, 'value.string).where('key === 1).select('value).analyze

Output:




import org.apache.spark.sql.catalyst.expressions.Literal

import org.apache.spark.sql.catalyst.dsl.`package`.expressions._

res0: org.apache.spark.sql.catalyst.expressions.Expression = (1 + 1)

res1: org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute = 'a
res2: String = 'b
res3: org.apache.spark.sql.catalyst.expressions.Predicate = ('a = 'b)
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.dsl.`package`.plans._


res4: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan = Project [value#5]
+- Filter (key#4 = 1)
   +- LocalRelation <empty>, [key#4, value#5]
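
The DSL operators above are thin sugar over Catalyst's ordinary expression constructors. As a minimal sketch (it only needs the spark-catalyst classes on the classpath, e.g. pasted into a spark-shell), the values printed as res0, res1 and res3 are instances of the concrete classes Add, UnresolvedAttribute and EqualTo:

import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.{Add, EqualTo, Literal}

// + on two expressions builds an Add node (the declared type is Expression, the runtime class is Add)
val sum = Literal(1) + Literal(1)

// .attr on a Symbol yields an UnresolvedAttribute, resolved later by the analyzer
val attr = 'a.attr

// === builds an EqualTo predicate over two unresolved attributes
val pred = 'a === 'b

assert(sum.isInstanceOf[Add])
assert(attr.isInstanceOf[UnresolvedAttribute])
assert(pred.isInstanceOf[EqualTo])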




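A plan built and resolved with this DSL can be fed directly to individual optimizer rules, which is how many of Catalyst's optimizer unit tests are written. The sketch below (again only assuming the catalyst classes are on the classpath) applies the ConstantFolding rule to a DSL-built plan; the foldable sub-expression (1 + 1) in the filter condition is folded into the literal 2:

import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.optimizer.ConstantFolding
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation

// Build an unresolved plan with the DSL and resolve it with .analyze
val analyzed = LocalRelation('key.int, 'value.string)
  .where('key === (Literal(1) + Literal(1)))
  .analyze

// ConstantFolding is a Rule[LogicalPlan]; applying it replaces (1 + 1) with 2
val folded = ConstantFolding(analyzed)
println(folded.treeString)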