Details
-
Bug
-
Status: Resolved
-
Major
-
Resolution: Not A Bug
-
2.1.1
-
None
-
None
Description
Java keywords escaped with backticks (e.g. a case-class field named `const`) don't work in Spark 2.1.1, although they work in Spark 2.0.1.
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
// Case class whose field name is a Java keyword, escaped with Scala backticks;
// this field is what triggers the reported Spark 2.1.1 schema-inference failure.
case class a(`const`: Int)
// Wrapper case class nesting `a`, so the keyword-named field sits one level
// deep in the inferred DataFrame schema.
case class b(aa: a)
/**
 * Minimal repro for the reported issue: creating a DataFrame from an RDD of a
 * case class whose (nested) field name is a backtick-escaped Java keyword.
 *
 * NOTE(review): the original paste collapsed the whole body onto one line with
 * no statement separators, which is not valid Scala; reformatted so it compiles.
 */
object KeyworkdsTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("scala").setMaster("local[2]")
    val sc = new SparkContext(conf)
    val spark = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate()
    // Build an RDD of the keyword-field case class and convert it to a
    // DataFrame; schema inference over `const` is where the failure occurs.
    val q = Seq(b(a(1)))
    val rdd = sc.makeRDD(q)
    val d = spark.createDataFrame(rdd)
  }
}