Spark / SPARK-25580

com.mongodb.spark.exceptions.MongoTypeConversionException: Cannot cast STRING into a DoubleType


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Major
    • Resolution: Invalid
    • Affects Version/s: 2.3.0
    • Fix Version/s: None
    • Component/s: Spark Core

    Description

      Getting the exception below when trying to read data from MongoDB into a Spark DataFrame.
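
      The original ReadMongo.scala is not attached, so the following is only a minimal sketch of the kind of read that triggers this, assuming the connector's default schema inference; the connection URI, database, and collection names are placeholders, not values from the report.

      import com.mongodb.spark.MongoSpark
      import org.apache.spark.sql.SparkSession

      object ReadMongo {
        def main(args: Array[String]): Unit = {
          val spark = SparkSession.builder()
            .master("local[*]")
            .appName("ReadMongo")
            // Placeholder URI: the real database and collection are not in the report.
            .config("spark.mongodb.input.uri", "mongodb://localhost:27017/test.coll")
            .getOrCreate()

          // The connector infers the DataFrame schema by sampling documents; a field
          // sampled as numeric then fails on a document that stores the value as a
          // string (here: BsonString{value='11.70'}).
          val df = MongoSpark.load(spark)
          df.take(5).foreach(println) // the failing action in the log is take (ReadMongo.scala:23)
        }
      }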

      com.mongodb.spark.exceptions.MongoTypeConversionException: Cannot cast STRING into a DoubleType (value: BsonString{value='11.70'})
      at com.mongodb.spark.sql.MapFunctions$.com$mongodb$spark$sql$MapFunctions$$convertToDataType(MapFunctions.scala:200)
      at com.mongodb.spark.sql.MapFunctions$$anonfun$3.apply(MapFunctions.scala:39)
      at com.mongodb.spark.sql.MapFunctions$$anonfun$3.apply(MapFunctions.scala:37)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
      at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
      at com.mongodb.spark.sql.MapFunctions$.documentToRow(MapFunctions.scala:37)
      at com.mongodb.spark.sql.MapFunctions$.castToStructType(MapFunctions.scala:222)
      at com.mongodb.spark.sql.MapFunctions$.com$mongodb$spark$sql$MapFunctions$$convertToDataType(MapFunctions.scala:194)
      at com.mongodb.spark.sql.MapFunctions$$anonfun$3.apply(MapFunctions.scala:39)
      at com.mongodb.spark.sql.MapFunctions$$anonfun$3.apply(MapFunctions.scala:37)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
      at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
      at com.mongodb.spark.sql.MapFunctions$.documentToRow(MapFunctions.scala:37)
      at com.mongodb.spark.sql.MongoRelation$$anonfun$buildScan$1.apply(MongoRelation.scala:58)
      at com.mongodb.spark.sql.MongoRelation$$anonfun$buildScan$1.apply(MongoRelation.scala:58)
      at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
      at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
      at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
      at scala.collection.Iterator$$anon$11.next(Iterator.scala:370)
      at scala.collection.Iterator$$anon$10.next(Iterator.scala:354)
      at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:254)
      at org.apache.spark.sql.execution.SparkPlan$$anonfun$2.apply(SparkPlan.scala:247)
      at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:830)
      at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$25.apply(RDD.scala:830)
      at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
      at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
      at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:324)
      at org.apache.spark.rdd.RDD.iterator(RDD.scala:288)
      at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
      at org.apache.spark.scheduler.Task.run(Task.scala:109)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
      at java.lang.Thread.run(Thread.java:748)
      18/10/01 16:52:44 WARN TaskSetManager: Lost task 0.0 in stage 3.0 (TID 3, localhost, executor driver): com.mongodb.spark.exceptions.MongoTypeConversionException: Cannot cast STRING into a DoubleType (value: BsonString{value='11.70'})

      18/10/01 16:52:44 ERROR TaskSetManager: Task 0 in stage 3.0 failed 1 times; aborting job
      18/10/01 16:52:44 INFO TaskSchedulerImpl: Removed TaskSet 3.0, whose tasks have all completed, from pool
      18/10/01 16:52:44 INFO TaskSchedulerImpl: Cancelling stage 3
      18/10/01 16:52:44 INFO DAGScheduler: ResultStage 3 (take at ReadMongo.scala:23) failed in 2.305 s due to Job aborted due to stage failure: Task 0 in stage 3.0 failed 1 times, most recent failure: Lost task 0.0 in stage 3.0 (TID 3, localhost, executor driver): com.mongodb.spark.exceptions.MongoTypeConversionException: Cannot cast STRING into a DoubleType (value: BsonString{value='11.70'})

      Driver stacktrace:
      18/10/01 16:52:44 INFO DAGScheduler: Job 3 failed: take at ReadMongo.scala:23, took 2.307729 s
      Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 3.0 failed 1 times, most recent failure: Lost task 0.0 in stage 3.0 (TID 3, localhost, executor driver): com.mongodb.spark.exceptions.MongoTypeConversionException: Cannot cast STRING into a DoubleType (value: BsonString{value='11.70'})
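
      A common cause of this error (consistent with the trace, though not confirmed in this report) is that the MongoDB Spark connector infers the schema by sampling documents: if a field holds doubles in the sampled documents but strings elsewhere, the inferred DoubleType then fails on a value such as BsonString{value='11.70'}. The usual workaround is to supply an explicit schema so the ambiguous field is read as a string and cast afterwards. A minimal sketch, with "price" as a hypothetical stand-in for the real field name:

      import org.apache.spark.sql.SparkSession
      import org.apache.spark.sql.types.{StringType, StructField, StructType}

      object ReadMongoWithSchema {
        def main(args: Array[String]): Unit = {
          val spark = SparkSession.builder()
            .master("local[*]")
            .appName("ReadMongoWithSchema")
            .config("spark.mongodb.input.uri", "mongodb://localhost:27017/test.coll") // placeholder, as above
            .getOrCreate()

          // Read the ambiguous field as a string so mixed STRING/DOUBLE values
          // no longer trip MongoTypeConversionException; cast explicitly later.
          // "price" is a hypothetical stand-in for the real field name.
          val schema = StructType(Seq(StructField("price", StringType, nullable = true)))

          val df = spark.read
            .format("com.mongodb.spark.sql.DefaultSource")
            .schema(schema)
            .load()

          df.selectExpr("cast(price as double) as price").show(5)
        }
      }

      Note that the failing frames (com.mongodb.spark.sql.MapFunctions) are in the mongo-spark-connector rather than Spark Core, which is consistent with the Invalid resolution: the conversion behavior would need to be reported against the connector.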


          People

            Assignee: Unassigned
            Reporter: Hariprasad Allaka (hariprasad1)
            Votes: 0
            Watchers: 1
