Uploaded image for project: 'Apache Drill'
  1. Apache Drill
  2. DRILL-6201

Failed to create input splits: No FileSystem for scheme: maprfs

    XMLWordPrintableJSON

    Details

    • Type: Bug
    • Status: Open
    • Priority: Major
    • Resolution: Unresolved
    • Affects Version/s: None
    • Fix Version/s: None
    • Labels:
      None
    • Environment:

      MapR cluster - CentOS

      Apache Drill is installed in a separate VM (not a cluster node)

    • Flags:
      Important

      Description

      2018-03-01 14:03:28 ERROR HiveMetadataProvider:294 - Failed to create input splits: No FileSystem for scheme: maprfs
      java.io.IOException: No FileSystem for scheme: maprfs
      at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:269) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:262) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at java.security.AccessController.doPrivileged(Native Method) ~[?:1.7.0_161]
      at javax.security.auth.Subject.doAs(Subject.java:421) ~[?:1.7.0_161]
      at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI(HiveMetadataProvider.java:262) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits(HiveMetadataProvider.java:154) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits(HiveMetadataProvider.java:176) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getInputSplits(HiveScan.java:122) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth(HiveScan.java:171) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.physical.ScanPrule.onMatch(ScanPrule.java:41) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:228) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) [drill-java-exec-1.12.0.jar:1.12.0]
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1152) [?:1.7.0_161]
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:622) [?:1.7.0_161]
      at java.lang.Thread.run(Thread.java:748) [?:1.7.0_161]
      2018-03-01 14:03:28 ERROR HiveMetadataProvider:180 - Failed to get InputSplits
      org.apache.drill.common.exceptions.DrillRuntimeException: Failed to create input splits: No FileSystem for scheme: maprfs
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI(HiveMetadataProvider.java:295) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits(HiveMetadataProvider.java:154) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits(HiveMetadataProvider.java:176) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getInputSplits(HiveScan.java:122) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth(HiveScan.java:171) [drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.physical.ScanPrule.onMatch(ScanPrule.java:41) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:228) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) [calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) [drill-java-exec-1.12.0.jar:1.12.0]
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1152) [?:1.7.0_161]
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:622) [?:1.7.0_161]
      at java.lang.Thread.run(Thread.java:748) [?:1.7.0_161]
      Caused by: java.io.IOException: No FileSystem for scheme: maprfs
      at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:269) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:262) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at java.security.AccessController.doPrivileged(Native Method) ~[?:1.7.0_161]
      at javax.security.auth.Subject.doAs(Subject.java:421) ~[?:1.7.0_161]
      at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI(HiveMetadataProvider.java:262) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      ... 18 more
      2018-03-01 14:03:28 ERROR Foreman:593 - SYSTEM ERROR: IOException: No FileSystem for scheme: maprfs

      [Error Id: dc5bcb9a-a350-427d-b443-c60ca9cef294 on 172.28.32.7:31010]
      org.apache.drill.common.exceptions.UserException: SYSTEM ERROR: IOException: No FileSystem for scheme: maprfs

      [Error Id: dc5bcb9a-a350-427d-b443-c60ca9cef294 on 172.28.32.7:31010]
      at org.apache.drill.common.exceptions.UserException$Builder.build(UserException.java:586) [drill-common-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman$ForemanResult.close(Foreman.java:801) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.moveToState(Foreman.java:896) [drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:306) [drill-java-exec-1.12.0.jar:1.12.0]
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1152) [?:1.7.0_161]
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:622) [?:1.7.0_161]
      at java.lang.Thread.run(Thread.java:748) [?:1.7.0_161]
      Caused by: org.apache.drill.exec.work.foreman.ForemanException: Unexpected exception during fragment initialization: Internal error: Error while applying rule Prel.ScanPrule, args [rel#1208:DrillScanRel.LOGICAL.ANY([]).[](table=[hivenormal, dw, dim_order],groupscan=HiveScan [table=Table(dbName:dw, tableName:dim_order), columns=[`id_order`, `code_delivery`, `code_order`, `id_country`, `id_channel`, `id_store`, `id_campaign`, `id_order_status`, `flag_marketplace`, `order_date`, `update_date`, `nm_country`], numPartitions=1, partitions= [Partition(values:[BR])], inputDirectories=[maprfs:/mapr/az-prd-mapr-cluster/NS_GovernedData/dim_order/nm_country=BR]])]
      ... 4 more
      Caused by: java.lang.AssertionError: Internal error: Error while applying rule Prel.ScanPrule, args [rel#1208:DrillScanRel.LOGICAL.ANY([]).[](table=[hivenormal, dw, dim_order],groupscan=HiveScan [table=Table(dbName:dw, tableName:dim_order), columns=[`id_order`, `code_delivery`, `code_order`, `id_country`, `id_channel`, `id_store`, `id_campaign`, `id_order_status`, `flag_marketplace`, `order_date`, `update_date`, `nm_country`], numPartitions=1, partitions= [Partition(values:[BR])], inputDirectories=[maprfs:/mapr/az-prd-mapr-cluster/NS_GovernedData/dim_order/nm_country=BR]])]
      at org.apache.calcite.util.Util.newInternal(Util.java:792) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:251) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) ~[drill-java-exec-1.12.0.jar:1.12.0]
      ... 3 more
      Caused by: org.apache.drill.common.exceptions.DrillRuntimeException: Failed to get InputSplits
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits(HiveMetadataProvider.java:181) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getInputSplits(HiveScan.java:122) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth(HiveScan.java:171) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.physical.ScanPrule.onMatch(ScanPrule.java:41) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:228) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) ~[drill-java-exec-1.12.0.jar:1.12.0]
      ... 3 more
      Caused by: org.apache.drill.common.exceptions.DrillRuntimeException: Failed to create input splits: No FileSystem for scheme: maprfs
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI(HiveMetadataProvider.java:295) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits(HiveMetadataProvider.java:154) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits(HiveMetadataProvider.java:176) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getInputSplits(HiveScan.java:122) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth(HiveScan.java:171) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.physical.ScanPrule.onMatch(ScanPrule.java:41) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:228) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) ~[drill-java-exec-1.12.0.jar:1.12.0]
      ... 3 more
      Caused by: java.io.IOException: No FileSystem for scheme: maprfs
      at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2644) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2651) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:92) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2687) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2669) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:371) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.hadoop.fs.Path.getFileSystem(Path.java:295) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:269) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run(HiveMetadataProvider.java:262) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at java.security.AccessController.doPrivileged(Native Method) ~[?:1.7.0_161]
      at javax.security.auth.Subject.doAs(Subject.java:421) ~[?:1.7.0_161]
      at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657) ~[hadoop-common-2.7.1.jar:?]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI(HiveMetadataProvider.java:262) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits(HiveMetadataProvider.java:154) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits(HiveMetadataProvider.java:176) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getInputSplits(HiveScan.java:122) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth(HiveScan.java:171) ~[drill-storage-hive-core-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.physical.ScanPrule.onMatch(ScanPrule.java:41) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch(VolcanoRuleCall.java:228) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp(VolcanoPlanner.java:811) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.calcite.tools.Programs$RuleSetProgram.run(Programs.java:310) ~[calcite-core-1.4.0-drill-r23.jar:1.4.0-drill-r23]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform(DefaultSqlHandler.java:400) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel(DefaultSqlHandler.java:429) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:169) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan(DrillSqlWorker.java:131) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:79) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:1017) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:289) ~[drill-java-exec-1.12.0.jar:1.12.0]
      ... 3 more
      2018-03-01 14:03:28 ERROR QueryResources:91 - Query from Web UI Failed
      org.apache.drill.common.exceptions.UserRemoteException: SYSTEM ERROR: IOException: No FileSystem for scheme: maprfs

      [Error Id: dc5bcb9a-a350-427d-b443-c60ca9cef294 on 172.28.32.7:31010]

      (org.apache.drill.exec.work.foreman.ForemanException) Unexpected exception during fragment initialization: Internal error: Error while applying rule Prel.ScanPrule, args [rel#1208:DrillScanRel.LOGICAL.ANY([]).[](table=[hivenormal, dw, dim_order],groupscan=HiveScan [table=Table(dbName:dw, tableName:dim_order), columns=[`id_order`, `code_delivery`, `code_order`, `id_country`, `id_channel`, `id_store`, `id_campaign`, `id_order_status`, `flag_marketplace`, `order_date`, `update_date`, `nm_country`], numPartitions=1, partitions= [Partition(values:[BR])], inputDirectories=[maprfs:/mapr/az-prd-mapr-cluster/NS_GovernedData/dim_order/nm_country=BR]])]
      org.apache.drill.exec.work.foreman.Foreman.run():306
      java.util.concurrent.ThreadPoolExecutor.runWorker():1152
      java.util.concurrent.ThreadPoolExecutor$Worker.run():622
      java.lang.Thread.run():748
      Caused By (java.lang.AssertionError) Internal error: Error while applying rule Prel.ScanPrule, args [rel#1208:DrillScanRel.LOGICAL.ANY([]).[](table=[hivenormal, dw, dim_order],groupscan=HiveScan [table=Table(dbName:dw, tableName:dim_order), columns=[`id_order`, `code_delivery`, `code_order`, `id_country`, `id_channel`, `id_store`, `id_campaign`, `id_order_status`, `flag_marketplace`, `order_date`, `update_date`, `nm_country`], numPartitions=1, partitions= [Partition(values:[BR])], inputDirectories=[maprfs:/mapr/az-prd-mapr-cluster/NS_GovernedData/dim_order/nm_country=BR]])]
      org.apache.calcite.util.Util.newInternal():792
      org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():251
      org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():811
      org.apache.calcite.tools.Programs$RuleSetProgram.run():310
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():400
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():429
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan():169
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():131
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():79
      org.apache.drill.exec.work.foreman.Foreman.runSQL():1017
      org.apache.drill.exec.work.foreman.Foreman.run():289
      java.util.concurrent.ThreadPoolExecutor.runWorker():1152
      java.util.concurrent.ThreadPoolExecutor$Worker.run():622
      java.lang.Thread.run():748
      Caused By (org.apache.drill.common.exceptions.DrillRuntimeException) Failed to get InputSplits
      org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():181
      org.apache.drill.exec.store.hive.HiveScan.getInputSplits():122
      org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():171
      org.apache.drill.exec.planner.physical.ScanPrule.onMatch():41
      org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():228
      org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():811
      org.apache.calcite.tools.Programs$RuleSetProgram.run():310
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():400
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():429
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan():169
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():131
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():79
      org.apache.drill.exec.work.foreman.Foreman.runSQL():1017
      org.apache.drill.exec.work.foreman.Foreman.run():289
      java.util.concurrent.ThreadPoolExecutor.runWorker():1152
      java.util.concurrent.ThreadPoolExecutor$Worker.run():622
      java.lang.Thread.run():748
      Caused By (org.apache.drill.common.exceptions.DrillRuntimeException) Failed to create input splits: No FileSystem for scheme: maprfs
      org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI():295
      org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits():154
      org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():176
      org.apache.drill.exec.store.hive.HiveScan.getInputSplits():122
      org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():171
      org.apache.drill.exec.planner.physical.ScanPrule.onMatch():41
      org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():228
      org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():811
      org.apache.calcite.tools.Programs$RuleSetProgram.run():310
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():400
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():429
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan():169
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():131
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():79
      org.apache.drill.exec.work.foreman.Foreman.runSQL():1017
      org.apache.drill.exec.work.foreman.Foreman.run():289
      java.util.concurrent.ThreadPoolExecutor.runWorker():1152
      java.util.concurrent.ThreadPoolExecutor$Worker.run():622
      java.lang.Thread.run():748
      Caused By (java.io.IOException) No FileSystem for scheme: maprfs
      org.apache.hadoop.fs.FileSystem.getFileSystemClass():2644
      org.apache.hadoop.fs.FileSystem.createFileSystem():2651
      org.apache.hadoop.fs.FileSystem.access$200():92
      org.apache.hadoop.fs.FileSystem$Cache.getInternal():2687
      org.apache.hadoop.fs.FileSystem$Cache.get():2669
      org.apache.hadoop.fs.FileSystem.get():371
      org.apache.hadoop.fs.Path.getFileSystem():295
      org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run():269
      org.apache.drill.exec.store.hive.HiveMetadataProvider$1.run():262
      java.security.AccessController.doPrivileged():-2
      javax.security.auth.Subject.doAs():421
      org.apache.hadoop.security.UserGroupInformation.doAs():1657
      org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI():262
      org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits():154
      org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():176
      org.apache.drill.exec.store.hive.HiveScan.getInputSplits():122
      org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():171
      org.apache.drill.exec.planner.physical.ScanPrule.onMatch():41
      org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():228
      org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():811
      org.apache.calcite.tools.Programs$RuleSetProgram.run():310
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():400
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():429
      org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan():169
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():131
      org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():79
      org.apache.drill.exec.work.foreman.Foreman.runSQL():1017
      org.apache.drill.exec.work.foreman.Foreman.run():289
      java.util.concurrent.ThreadPoolExecutor.runWorker():1152
      java.util.concurrent.ThreadPoolExecutor$Worker.run():622
      java.lang.Thread.run():748

      at org.apache.drill.exec.rpc.AbstractDisposableUserClientConnection.sendResult(AbstractDisposableUserClientConnection.java:85) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman$ForemanResult.close(Foreman.java:822) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.moveToState(Foreman.java:896) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:306) ~[drill-java-exec-1.12.0.jar:1.12.0]
      at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1152) ~[?:1.7.0_161]
      at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:622) ~[?:1.7.0_161]
      at java.lang.Thread.run(Thread.java:748) [?:1.7.0_161]

       

        Attachments

          Activity

            People

            • Assignee:
              Unassigned
              Reporter:
              willianmattos Willian Mattos Ribeiro
            • Votes:
              0 Vote for this issue
              Watchers:
              2 Start watching this issue

              Dates

              • Created:
                Updated: