Uploaded image for project: 'Spark'
  1. Spark
  2. SPARK-16725

Migrate Guava to 16+?

    XMLWordPrintableJSON

Details

    • Improvement
    • Status: Resolved
    • Minor
    • Resolution: Won't Fix
    • 2.0.1
    • None
    • Build
    • None

    Description

      Currently Spark depends on an old version of Guava, version 14. However, the Spark Cassandra driver requires (and asserts at runtime on) Guava version 16 or above.

      It would be great to update the Guava dependency to version 16+

      diff --git a/core/src/main/scala/org/apache/spark/SecurityManager.scala b/core/src/main/scala/org/apache/spark/SecurityManager.scala
      index f72c7de..abddafe 100644
      --- a/core/src/main/scala/org/apache/spark/SecurityManager.scala
      +++ b/core/src/main/scala/org/apache/spark/SecurityManager.scala
      @@ -23,7 +23,7 @@
      import java.security.{KeyStore, SecureRandom}

      import java.security.cert.X509Certificate
      import javax.net.ssl._

      -import com.google.common.hash.HashCodes
      +import com.google.common.hash.HashCode
      import com.google.common.io.Files
      import org.apache.hadoop.io.Text

      @@ -432,7 +432,7 @@ private[spark] class SecurityManager(sparkConf: SparkConf)
      val secret = new Array[Byte](length)
      rnd.nextBytes(secret)

      - val cookie = HashCodes.fromBytes(secret).toString()
      + val cookie = HashCode.fromBytes(secret).toString()
        SparkHadoopUtil.get.addSecretKeyToUserCredentials(SECRET_LOOKUP_KEY, cookie)
        cookie
        } else {
        diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
        index af50a6d..02545ae 100644
        --- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
        +++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
            @@ -72,7 +72,7 @@ class SparkEnv (

      // A general, soft-reference map for metadata needed during HadoopRDD split computation
      // (e.g., HadoopFileRDD uses this to cache JobConfs and InputFormats).

      - private[spark] val hadoopJobMetadata = new MapMaker().softValues().makeMap[String, Any]()
      + private[spark] val hadoopJobMetadata = new MapMaker().weakValues().makeMap[String, Any]()

      private[spark] var driverTmpDir: Option[String] = None

      diff --git a/pom.xml b/pom.xml
      index d064cb5..7c3e036 100644
      --- a/pom.xml
      +++ b/pom.xml
      @@ -368,8 +368,7 @@
      <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>

      - <version>14.0.1</version>
      - <scope>provided</scope>
      + <version>19.0</version>
        </dependency>
        <!-- End of shaded deps -->
        <dependency>

      Attachments

        Activity

          People

            Unassigned Unassigned
            min.mwei Min Wei
            Votes:
            0 Vote for this issue
            Watchers:
            4 Start watching this issue

            Dates

              Created:
              Updated:
              Resolved:

              Time Tracking

                Estimated:
                Original Estimate - 12h
                12h
                Remaining:
                Remaining Estimate - 12h
                12h
                Logged:
                Time Spent - Not Specified
                Not Specified