Spark / SPARK-12233

Cannot specify a data frame column during join


Details

    • Type: Bug
    • Status: Closed
    • Priority: Minor
    • Resolution: Not A Problem
    • Affects Version/s: 1.5.2
    • Fix Version/s: None
    • Component/s: SQL
    • Labels: None

    Description

              sqlContext.udf.register("lowercase", (s: String) => {
                if (null == s) "" else s.toLowerCase
              })

              sqlContext.udf.register("substr", (s: String) => {
                if (null == s) ""
                else {
                  val index = s.indexOf("@")
                  if (index < 0) s else s.toLowerCase.substring(index + 1)
                }
              })

              sqlContext.read.orc("/data/test/test.data")
                .registerTempTable("testTable")

              val extracted =
                sqlContext.sql("""SELECT lowercase(given_name)    AS given_name,
                                         lowercase(family_name)   AS family_name,
                                         substr(email_address)    AS domain,
                                         lowercase(email_address) AS emailaddr,
                                         experience
                                  FROM testTable
                                  WHERE email_address != ''
                               """)
                  .distinct

              val count =
                extracted.groupBy("given_name", "family_name", "domain")
                         .count

              count.where(count("count") > 1)
                   .drop(count("count"))
                   .join(extracted, Seq("given_name", "family_name", "domain"))
                   .select(count("given_name"), count("family_name"), extracted("emailaddr"))

      The final .select(...) call (the line shown in red in the original report) only works when written with plain column names:

      .select("given_name", "family_name", "emailaddr")

      Selecting through the DataFrame column references, as written above, fails with:
      ========

      org.apache.spark.sql.AnalysisException: resolved attribute(s) emailaddr#525 missing from given_name#522,domain#524,url#517,family_name#523,emailaddr#532,experience#490 in operator !Project [given_name#522,family_name#523,emailaddr#525];
      	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.failAnalysis(CheckAnalysis.scala:37)
      	at org.apache.spark.sql.catalyst.analysis.Analyzer.failAnalysis(Analyzer.scala:44)
      	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:154)
      	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:49)
      	at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:103)
      	at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:49)
      	at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:44)
      	at org.apache.spark.sql.SQLContext$QueryExecution.assertAnalyzed(SQLContext.scala:914)
      	at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:132)
      	at org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$logicalPlanToDataFrame(DataFrame.scala:154)
      	at org.apache.spark.sql.DataFrame.select(DataFrame.scala:691)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:43)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:45)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:47)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:49)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:51)
      	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:53)
      	at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:55)
      	at $iwC$$iwC$$iwC$$iwC.<init>(<console>:57)
      	at $iwC$$iwC$$iwC.<init>(<console>:59)
      	at $iwC$$iwC.<init>(<console>:61)
      	at $iwC.<init>(<console>:63)
      	at <init>(<console>:65)
      	at .<init>(<console>:69)
      	at .<clinit>(<console>)
      	at .<init>(<console>:7)
      	at .<clinit>(<console>)
      	at $print(<console>)
      	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
      	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      	at java.lang.reflect.Method.invoke(Method.java:606)
      	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
      	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
      	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
      	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
      	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
      	at org.apache.zeppelin.spark.SparkInterpreter.interpretInput(SparkInterpreter.java:675)
      	at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:640)
      	at org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:633)
      	at org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57)
      	at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93)
      	at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276)
      	at org.apache.zeppelin.scheduler.Job.run(Job.java:170)
      	at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118)
      	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
      	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
      	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178)
      	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292)
      	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
      	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
      	at java.lang.Thread.run(Thread.java:745)
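
      A minimal, self-contained sketch of the same pattern (hypothetical in-memory rows standing in for the ORC file; Spark 1.5-style SQLContext API). Selecting by plain column name on the joined DataFrame resolves cleanly, while selecting through the original DataFrame references is what triggers the "resolved attribute(s) ... missing" error above: the joined plan carries a re-generated emailaddr attribute (#532) that no longer matches the one captured by extracted("emailaddr") (#525).

        // Sketch only: hypothetical in-memory data stands in for /data/test/test.data.
        import org.apache.spark.{SparkConf, SparkContext}
        import org.apache.spark.sql.SQLContext

        object JoinSelectSketch {
          def main(args: Array[String]): Unit = {
            val sc = new SparkContext(new SparkConf().setAppName("join-select").setMaster("local[*]"))
            val sqlContext = new SQLContext(sc)
            import sqlContext.implicits._

            // Stand-in for the "extracted" DataFrame built from the ORC data in the report.
            val extracted = Seq(
              ("john", "doe", "example.com", "john.doe@example.com"),
              ("john", "doe", "example.com", "j.doe@example.com")
            ).toDF("given_name", "family_name", "domain", "emailaddr")

            val count = extracted.groupBy("given_name", "family_name", "domain").count()

            val joined = count.where(count("count") > 1)
              .drop("count")
              .join(extracted, Seq("given_name", "family_name", "domain"))

            // Works: resolve the columns by name against the joined DataFrame.
            joined.select("given_name", "family_name", "emailaddr").show()

            // Reported failure: extracted("emailaddr") holds an attribute id captured before
            // the self-join, which the analyzer can no longer find in the joined plan.
            // joined.select(count("given_name"), count("family_name"), extracted("emailaddr")).show()

            sc.stop()
          }
        }

      That behaviour appears consistent with the Not A Problem resolution: after a join on usingColumns, the columns have to be addressed through the joined DataFrame (or by name), not through the pre-join extracted reference.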
      

          People

            Assignee: Unassigned
            Reporter: azuryy Fengdong Yu
            Votes: 0
            Watchers: 5
