Description
Got an issue similar to https://issues.apache.org/jira/browse/SPARK-8897, but with the Decimal datatype coming from a Postgres DB:
# Set up SparkR
>Sys.setenv(SPARK_HOME="/Users/ashkurenko/work/git_repos/spark")
>Sys.setenv(SPARKR_SUBMIT_ARGS="--driver-class-path ~/Downloads/postgresql-9.4-1201.jdbc4.jar sparkr-shell")
>.libPaths(c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib"), .libPaths()))
>library(SparkR)
>sc <- sparkR.init(master="local")
# Connect to a Postgres DB via JDBC
>sqlContext <- sparkRSQL.init(sc)
>sql(sqlContext, "
CREATE TEMPORARY TABLE mytable
USING org.apache.spark.sql.jdbc
OPTIONS (url 'jdbc:postgresql://servername:5432/dbname'
,dbtable 'mydbtable'
)
")
# Try pulling a Decimal column from a table
>myDataFrame <- sql(sqlContext,("select a_decimal_column from mytable "))
# The schema shows up fine
>show(myDataFrame)
DataFrame[a_decimal_column:decimal(10,0)]
>schema(myDataFrame)
StructType
-name = "a_decimal_column", type = "DecimalType(10,0)", nullable = TRUE |
# ... but pulling data fails:
localDF <- collect(myDataFrame)
Error in as.data.frame.default(x[[i]], optional = TRUE) :
cannot coerce class ""jobj"" to a data.frame