Mahout / MAHOUT-1946

ViennaCL not being picked up by JNI


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Blocker
    • Resolution: Fixed
    • Affects Version/s: None
    • Fix Version/s: 0.13.0
    • Component/s: None
    • Labels: None
    • Sprint: 0.14.0 Release

    Description

      Seen while using the PR for MAHOUT-1938, but it is probably present in master as well:

      scala> :load ./examples/bin/SparseSparseDrmTimer.mscala
      Loading ./examples/bin/SparseSparseDrmTimer.mscala...
      timeSparseDRMMMul: (m: Int, n: Int, s: Int, para: Int, pctDense: Double, seed: Long)Long

      scala> timeSparseDRMMMul(100,100,100,1,.02,1234L)
      [INFO] Creating org.apache.mahout.viennacl.opencl.GPUMMul solver
      [INFO] Successfully created org.apache.mahout.viennacl.opencl.GPUMMul solver
      gpuRWCW
      17/02/26 13:18:54 ERROR Executor: Exception in task 0.0 in stage 3.0 (TID 3)
      java.lang.UnsatisfiedLinkError: no jniViennaCL in java.library.path
      at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1867)
      at java.lang.Runtime.loadLibrary0(Runtime.java:870)
      at java.lang.System.loadLibrary(System.java:1122)
      at org.bytedeco.javacpp.Loader.loadLibrary(Loader.java:726)
      at org.bytedeco.javacpp.Loader.load(Loader.java:501)
      at org.bytedeco.javacpp.Loader.load(Loader.java:434)
      at org.apache.mahout.viennacl.opencl.javacpp.Context$.loadLib(Context.scala:63)
      at org.apache.mahout.viennacl.opencl.javacpp.Context$.<init>(Context.scala:65)
      at org.apache.mahout.viennacl.opencl.javacpp.Context$.<clinit>(Context.scala)
      at org.apache.mahout.viennacl.opencl.GPUMMul$.org$apache$mahout$viennacl$opencl$GPUMMul$$gpuRWCW(GPUMMul.scala:171)
      at org.apache.mahout.viennacl.opencl.GPUMMul$$anonfun$11.apply(GPUMMul.scala:77)
      at org.apache.mahout.viennacl.opencl.GPUMMul$$anonfun$11.apply(GPUMMul.scala:77)
      at org.apache.mahout.viennacl.opencl.GPUMMul$.apply(GPUMMul.scala:127)
      at org.apache.mahout.viennacl.opencl.GPUMMul$.apply(GPUMMul.scala:33)
      at org.apache.mahout.math.scalabindings.RLikeMatrixOps.$percent$times$percent(RLikeMatrixOps.scala:37)
      at org.apache.mahout.sparkbindings.blas.ABt$.org$apache$mahout$sparkbindings$blas$ABt$$mmulFunc$1(ABt.scala:98)
      at org.apache.mahout.sparkbindings.blas.ABt$$anonfun$6.apply(ABt.scala:113)
      at org.apache.mahout.sparkbindings.blas.ABt$$anonfun$6.apply(ABt.scala:113)
      at org.apache.mahout.sparkbindings.blas.ABt$$anonfun$pairwiseApply$1.apply(ABt.scala:209)
      at org.apache.mahout.sparkbindings.blas.ABt$$anonfun$pairwiseApply$1.apply(ABt.scala:209)
      at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      at org.apache.spark.util.collection.ExternalSorter.insertAll(ExternalSorter.scala:191)
      at org.apache.spark.shuffle.sort.SortShuffleWriter.write(SortShuffleWriter.scala:64)
      at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:73)
      at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
      at org.apache.spark.scheduler.Task.run(Task.scala:89)
      at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:227)
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      at java.lang.Thread.run(Thread.java:745)
      17/02/26 13:18:54 ERROR SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[Executor task launch worker-0,5,main]
      java.lang.UnsatisfiedLinkError: no jniViennaCL in java.library.path
      [stack trace identical to the one above]
      17/02/26 13:18:54 WARN TaskSetManager: Lost task 0.0 in stage 3.0 (TID 3, localhost): java.lang.UnsatisfiedLinkError: no jniViennaCL in java.library.path
      [stack trace identical to the one above]

      17/02/26 13:18:54 ERROR TaskSetManager: Task 0 in stage 3.0 failed 1 times; aborting job
      17/02/26 13:18:54 ERROR ShutdownHookManager: Exception while deleting Spark temp dir: /tmp/spark-34fcf1f5-a8e6-46f8-bdd6-dd1abab2ded1
      java.io.IOException: Failed to delete: /tmp/spark-34fcf1f5-a8e6-46f8-bdd6-dd1abab2ded1
      at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:929)
      at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:65)
      at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:62)
      at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
      at org.apache.spark.util.ShutdownHookManager$$anonfun$1.apply$mcV$sp(ShutdownHookManager.scala:62)
      at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:267)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:239)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
      at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1817)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:239)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
      at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
      at scala.util.Try$.apply(Try.scala:161)
      at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:239)
      at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:218)
      at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
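
      For anyone hitting this, a quick way to confirm whether the JNI library is even visible to the JVM is the sketch below. It only assumes the library name jniViennaCL reported in the error above; the calls are plain JDK API (System.getProperty and the same System.loadLibrary call the stack trace bottoms out in) and can be run straight from the Mahout Spark shell.

      // Diagnostic sketch: print the JVM's native library search path and
      // try to resolve jniViennaCL (the name from the error above) directly.
      println(System.getProperty("java.library.path"))
      try {
        // Same System.loadLibrary call the JavaCPP Loader ends up making in the
        // stack trace above; it throws UnsatisfiedLinkError when no
        // libjniViennaCL.so is found on java.library.path.
        System.loadLibrary("jniViennaCL")
        println("jniViennaCL resolved")
      } catch {
        case e: UnsatisfiedLinkError => println(s"jniViennaCL not found: ${e.getMessage}")
      }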

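      If the native bundle is actually present on the machine and the JVM simply cannot see it, one possible workaround (a sketch only; the directory below is a placeholder and these are standard Spark settings, not anything Mahout-specific) is to put the directory containing libjniViennaCL.so on the library path of both the driver and the executors:

      // Hypothetical workaround sketch: point the driver and executors at the
      // directory holding libjniViennaCL.so. "/path/to/viennacl/native" is a
      // placeholder, not a location shipped by Mahout.
      import org.apache.spark.SparkConf

      val conf = new SparkConf()
        .setAppName("viennacl-jni-check")
        .set("spark.driver.extraLibraryPath", "/path/to/viennacl/native")
        .set("spark.executor.extraLibraryPath", "/path/to/viennacl/native")
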

          People

            Assignee: Andrew Palumbo
            Reporter: Andrew Musselman
            Votes: 0
            Watchers: 3
