FLINK-20644

Check that the return type of the ScalarFunction eval method is not void


Details

    • Type: Bug
    • Status: Closed
    • Priority: Major
    • Resolution: Won't Fix
    • Affects Version/s: 1.11.1
    • Fix Version/s: None
    • Component/s: Table SQL / API
    • Environment: groupId: org.apache.flink, artifactId: flink-table-api-scala-bridge_2.11, version: 1.11.1

    Description

      flink-table-api-scala-bridge_2.11
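      The generated operator in the console output below calls a user-defined scalar function,
      cn.bicon.tableapitest.udf.ScalarFunctionTest$HashCode, whose eval method has no return value.
      A minimal sketch of that kind of function (hypothetical reconstruction; only the package and
      class names appear in the log, the body and the registration call are assumptions):

          package cn.bicon.tableapitest.udf

          import org.apache.flink.table.functions.ScalarFunction

          object ScalarFunctionTest {

            // eval is declared to return Unit (void on the JVM), so the planner has no
            // result type to map to SQL and ends up generating the invalid
            // "void javaResult$11 = ..." assignment at line 92 of the operator below.
            class HashCode extends ScalarFunction {
              def eval(s: String): Unit = {
                if (s != null) s.hashCode // computed but never returned
              }
            }
          }

      Registering such a function, e.g. with tableEnv.registerFunction("hashCode", new ScalarFunctionTest.HashCode),
      and calling it from a query produces the failure shown below.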

       

      console:

SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
ERROR StatusLogger Log4j2 could not find a logging implementation. Please add log4j-core to the classpath. Using SimpleLogger to log to the console...
/* 1 */
/* 2 */ public class StreamExecCalc$13 extends org.apache.flink.table.runtime.operators.AbstractProcessStreamOperator
/* 3 */     implements org.apache.flink.streaming.api.operators.OneInputStreamOperator {
/* 4 */
/* 5 */   private final Object[] references;
/* 6 */   private transient org.apache.flink.table.runtime.typeutils.StringDataSerializer typeSerializer$6;
/* 7 */   private transient org.apache.flink.table.data.util.DataFormatConverters.StringConverter converter$9;
/* 8 */   private transient cn.bicon.tableapitest.udf.ScalarFunctionTest$HashCode function_cn$bicon$tableapitest$udf$ScalarFunctionTest$HashCode$8999e79cc91b971a8777461fb7698c58;
/* 9 */   private transient org.apache.flink.table.data.util.DataFormatConverters.GenericConverter converter$12;
/* 10 */   org.apache.flink.table.data.BoxedWrapperRowData out = new org.apache.flink.table.data.BoxedWrapperRowData(3);
/* 11 */   private final org.apache.flink.streaming.runtime.streamrecord.StreamRecord outElement = new org.apache.flink.streaming.runtime.streamrecord.StreamRecord(null);
/* 12 */
/* 13 */   public StreamExecCalc$13(
/* 14 */       Object[] references,
/* 15 */       org.apache.flink.streaming.runtime.tasks.StreamTask task,
/* 16 */       org.apache.flink.streaming.api.graph.StreamConfig config,
/* 17 */       org.apache.flink.streaming.api.operators.Output output,
/* 18 */       org.apache.flink.streaming.runtime.tasks.ProcessingTimeService processingTimeService) throws Exception {
/* 19 */     this.references = references;
/* 20 */     typeSerializer$6 = (((org.apache.flink.table.runtime.typeutils.StringDataSerializer) references[0]));
/* 21 */     converter$9 = (((org.apache.flink.table.data.util.DataFormatConverters.StringConverter) references[1]));
/* 22 */     function_cn$bicon$tableapitest$udf$ScalarFunctionTest$HashCode$8999e79cc91b971a8777461fb7698c58 = (((cn.bicon.tableapitest.udf.ScalarFunctionTest$HashCode) references[2]));
/* 23 */     converter$12 = (((org.apache.flink.table.data.util.DataFormatConverters.GenericConverter) references[3]));
/* 24 */     this.setup(task, config, output);
/* 25 */     if (this instanceof org.apache.flink.streaming.api.operators.AbstractStreamOperator) {
/* 26 */       ((org.apache.flink.streaming.api.operators.AbstractStreamOperator) this)
/* 27 */         .setProcessingTimeService(processingTimeService);
/* 28 */     }
/* 29 */   }
/* 30 */
/* 31 */   @Override
/* 32 */   public void open() throws Exception {
/* 33 */     super.open();
/* 34 */
/* 35 */     function_cn$bicon$tableapitest$udf$ScalarFunctionTest$HashCode$8999e79cc91b971a8777461fb7698c58.open(new org.apache.flink.table.functions.FunctionContext(getRuntimeContext()));
/* 36 */
/* 37 */   }
/* 38 */
/* 39 */   @Override
/* 40 */   public void processElement(org.apache.flink.streaming.runtime.streamrecord.StreamRecord element) throws Exception {
/* 41 */     org.apache.flink.table.data.RowData in1 = (org.apache.flink.table.data.RowData) element.getValue();
/* 42 */
/* 43 */     org.apache.flink.table.data.binary.BinaryStringData field$5;
/* 44 */     boolean isNull$5;
/* 45 */     org.apache.flink.table.data.binary.BinaryStringData field$7;
/* 46 */     org.apache.flink.table.data.TimestampData field$8;
/* 47 */     boolean isNull$8;
/* 48 */     org.apache.flink.table.data.binary.BinaryRawValueData result$10;
/* 49 */
/* 50 */
/* 51 */     isNull$8 = in1.isNullAt(2);
/* 52 */     field$8 = null;
/* 53 */     if (!isNull$8) {
/* 54 */       field$8 = in1.getTimestamp(2, 3);
/* 55 */     }
/* 56 */
/* 57 */     isNull$5 = in1.isNullAt(0);
/* 58 */     field$5 = org.apache.flink.table.data.binary.BinaryStringData.EMPTY_UTF8;
/* 59 */     if (!isNull$5) {
/* 60 */       field$5 = ((org.apache.flink.table.data.binary.BinaryStringData) in1.getString(0));
/* 61 */     }
/* 62 */     field$7 = field$5;
/* 63 */     if (!isNull$5) {
/* 64 */       field$7 = (org.apache.flink.table.data.binary.BinaryStringData) (typeSerializer$6.copy(field$7));
/* 65 */     }
/* 66 */
/* 67 */
/* 68 */     out.setRowKind(in1.getRowKind());
/* 69 */
/* 70 */
/* 71 */
/* 72 */
/* 73 */     if (isNull$5) {
/* 74 */       out.setNullAt(0);
/* 75 */     } else {
/* 76 */       out.setNonPrimitiveValue(0, field$7);
/* 77 */     }
/* 78 */
/* 79 */
/* 80 */
/* 81 */     if (isNull$8) {
/* 82 */       out.setNullAt(1);
/* 83 */     } else {
/* 84 */       out.setNonPrimitiveValue(1, field$8);
/* 85 */     }
/* 86 */
/* 87 */
/* 88 */
/* 89 */
/* 90 */
/* 91 */
/* 92 */     void javaResult$11 = (void) function_cn$bicon$tableapitest$udf$ScalarFunctionTest$HashCode$8999e79cc91b971a8777461fb7698c58.eval(isNull$5 ? null : ((java.lang.String) converter$9.toExternal((org.apache.flink.table.data.binary.BinaryStringData) field$7)));
/* 93 */     result$10 = javaResult$11 == null ? null : ((org.apache.flink.table.data.binary.BinaryRawValueData) converter$12.toInternal((java.lang.Void) javaResult$11));
/* 94 */
/* 95 */
/* 96 */
/* 97 */
/* 98 */     if (false) {
/* 99 */       out.setNullAt(2);
/* 100 */     } else {
/* 101 */       out.setNonPrimitiveValue(2, ((org.apache.flink.table.data.binary.BinaryRawValueData) result$10));
/* 102 */     }
/* 103 */
/* 104 */
/* 105 */     output.collect(outElement.replace(out));
/* 106 */
/* 107 */
/* 108 */   }
/* 109 */
/* 110 */
/* 111 */
/* 112 */   @Override
/* 113 */   public void close() throws Exception {
/* 114 */     super.close();
/* 115 */
/* 116 */     function_cn$bicon$tableapitest$udf$ScalarFunctionTest$HashCode$8999e79cc91b971a8777461fb7698c58.close();
/* 117 */
/* 118 */   }
/* 119 */
/* 120 */
/* 121 */ }
/* 122 */
Exception in thread "main" java.util.concurrent.ExecutionException: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
    at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
    at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
    at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1719)
    at org.apache.flink.streaming.api.environment.LocalStreamEnvironment.execute(LocalStreamEnvironment.java:74)
    at org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.java:1699)
    at org.apache.flink.streaming.api.scala.StreamExecutionEnvironment.execute(StreamExecutionEnvironment.scala:699)
    at cn.bicon.tableapitest.udf.ScalarFunctionTest$.main(ScalarFunctionTest.scala:78)
    at cn.bicon.tableapitest.udf.ScalarFunctionTest.main(ScalarFunctionTest.scala)
Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
    at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
    at org.apache.flink.client.program.PerJobMiniClusterFactory$PerJobMiniClusterJobClient.lambda$getJobExecutionResult$2(PerJobMiniClusterFactory.java:186)
    at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602)
    at java.util.concurrent.CompletableFuture$UniApply.tryFire(CompletableFuture.java:577)
    at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
    at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
    at org.apache.flink.runtime.rpc.akka.AkkaInvocationHandler.lambda$invokeRpc$0(AkkaInvocationHandler.java:229)
    at java.util.concurrent.CompletableFuture.uniWhenComplete(CompletableFuture.java:760)
    at java.util.concurrent.CompletableFuture$UniWhenComplete.tryFire(CompletableFuture.java:736)
    at java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:474)
    at java.util.concurrent.CompletableFuture.complete(CompletableFuture.java:1962)
    at org.apache.flink.runtime.concurrent.FutureUtils$1.onComplete(FutureUtils.java:892)
    at akka.dispatch.OnComplete.internal(Future.scala:264)
    at akka.dispatch.OnComplete.internal(Future.scala:261)
    at akka.dispatch.japi$CallbackBridge.apply(Future.scala:191)
    at akka.dispatch.japi$CallbackBridge.apply(Future.scala:188)
    at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)
    at org.apache.flink.runtime.concurrent.Executors$DirectExecutionContext.execute(Executors.java:74)
    at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:44)
    at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:252)
    at akka.pattern.PromiseActorRef.$bang(AskSupport.scala:572)
    at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:22)
    at akka.pattern.PipeToSupport$PipeableFuture$$anonfun$pipeTo$1.applyOrElse(PipeToSupport.scala:21)
    at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:436)
    at scala.concurrent.Future$$anonfun$andThen$1.apply(Future.scala:435)
    at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:36)
    at akka.dispatch.BatchingExecutor$AbstractBatch.processBatch(BatchingExecutor.scala:55)
    at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply$mcV$sp(BatchingExecutor.scala:91)
    at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)
    at akka.dispatch.BatchingExecutor$BlockableBatch$$anonfun$run$1.apply(BatchingExecutor.scala:91)
    at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72)
    at akka.dispatch.BatchingExecutor$BlockableBatch.run(BatchingExecutor.scala:90)
    at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:40)
    at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(ForkJoinExecutorConfigurator.scala:44)
    at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
    at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
    at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
    at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)
    at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:192)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:185)
    at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:179)
    at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:503)
    at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:386)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:284)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:199)
    at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
    at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:152)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
    at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
    at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
    at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
    at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
    at akka.actor.ActorCell.invoke(ActorCell.scala:561)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
    at akka.dispatch.Mailbox.run(Mailbox.scala:225)
    at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
    ... 4 more
Caused by: java.lang.RuntimeException: Could not instantiate generated class 'StreamExecCalc$13'
    at org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:67)
    at org.apache.flink.table.runtime.operators.CodeGenOperatorFactory.createStreamOperator(CodeGenOperatorFactory.java:40)
    at org.apache.flink.streaming.api.operators.StreamOperatorFactoryUtil.createOperator(StreamOperatorFactoryUtil.java:70)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:470)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:459)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:459)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createChainedOperator(OperatorChain.java:459)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.createOutputCollector(OperatorChain.java:393)
    at org.apache.flink.streaming.runtime.tasks.OperatorChain.<init>(OperatorChain.java:155)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.beforeInvoke(StreamTask.java:453)
    at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:522)
    at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:721)
    at org.apache.flink.runtime.taskmanager.Task.run(Task.java:546)
    at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.flink.util.FlinkRuntimeException: org.apache.flink.api.common.InvalidProgramException: Table program cannot be compiled. This is a bug. Please file an issue.
    at org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:68)
    at org.apache.flink.table.runtime.generated.GeneratedClass.compile(GeneratedClass.java:78)
    at org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:65)
    ... 16 more
Caused by: org.apache.flink.shaded.guava18.com.google.common.util.concurrent.UncheckedExecutionException: org.apache.flink.api.common.InvalidProgramException: Table program cannot be compiled. This is a bug. Please file an issue.
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2203)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache.get(LocalCache.java:3937)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4739)
    at org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:66)
    ... 18 more
Caused by: org.apache.flink.api.common.InvalidProgramException: Table program cannot be compiled. This is a bug. Please file an issue.
    at org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:81)
    at org.apache.flink.table.runtime.generated.CompileUtils.lambda$compile$1(CompileUtils.java:66)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4742)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3527)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2319)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2282)
    at org.apache.flink.shaded.guava18.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2197)
    ... 21 more
Caused by: org.codehaus.commons.compiler.CompileException: Line 92, Column 29: "void" encountered in wrong context
    at org.codehaus.janino.Parser.compileException(Parser.java:3482)
    at org.codehaus.janino.Parser.parsePrimary(Parser.java:3015)
    at org.codehaus.janino.Parser.parseUnaryExpression(Parser.java:2758)
    at org.codehaus.janino.Parser.parseMultiplicativeExpression(Parser.java:2717)
    at org.codehaus.janino.Parser.parseAdditiveExpression(Parser.java:2696)
    at org.codehaus.janino.Parser.parseShiftExpression(Parser.java:2675)
    at org.codehaus.janino.Parser.parseRelationalExpression(Parser.java:2599)
    at org.codehaus.janino.Parser.parseEqualityExpression(Parser.java:2573)
    at org.codehaus.janino.Parser.parseAndExpression(Parser.java:2552)
    at org.codehaus.janino.Parser.parseExclusiveOrExpression(Parser.java:2531)
    at org.codehaus.janino.Parser.parseInclusiveOrExpression(Parser.java:2510)
    at org.codehaus.janino.Parser.parseConditionalAndExpression(Parser.java:2489)
    at org.codehaus.janino.Parser.parseConditionalOrExpression(Parser.java:2468)
    at org.codehaus.janino.Parser.parseConditionalExpression(Parser.java:2449)
    at org.codehaus.janino.Parser.parseAssignmentExpression(Parser.java:2428)
    at org.codehaus.janino.Parser.parseExpression(Parser.java:2413)
    at org.codehaus.janino.Parser.parseBlockStatement(Parser.java:1611)
    at org.codehaus.janino.Parser.parseBlockStatements(Parser.java:1544)
    at org.codehaus.janino.Parser.parseMethodDeclarationRest(Parser.java:1381)
    at org.codehaus.janino.Parser.parseClassBodyDeclaration(Parser.java:834)
    at org.codehaus.janino.Parser.parseClassBody(Parser.java:732)
    at org.codehaus.janino.Parser.parseClassDeclarationRest(Parser.java:638)
    at org.codehaus.janino.Parser.parsePackageMemberTypeDeclarationRest(Parser.java:366)
    at org.codehaus.janino.Parser.parseCompilationUnit(Parser.java:237)
    at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:216)
    at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:207)
    at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
    at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:75)
    at org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:78)
    ... 27 more
      Process finished with exit code 1
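
      Janino rejects line 92 of the generated operator above, where the planner emits
      void javaResult$11 = (void) ...eval(...) because the eval method declares no return value;
      void is not a legal variable type in Java, hence the CompileException. Declaring eval with a
      concrete return type avoids the invalid assignment. A sketch of a working variant (assumed
      fix for illustration, not taken from the ticket):

          package cn.bicon.tableapitest.udf

          import org.apache.flink.table.functions.ScalarFunction

          object ScalarFunctionTest {

            // Returning Int gives the planner a concrete result type, so code
            // generation no longer has to declare a void local variable.
            class HashCode extends ScalarFunction {
              def eval(s: String): Int =
                if (s == null) 0 else s.hashCode
            }
          }

      What this ticket asks for is an up-front check that the eval return type is not void,
      instead of the failure surfacing as a Janino compile error; it was resolved as Won't Fix.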

       


People

    Assignee: Unassigned
    Reporter: shiyu (shiyu2030)
    Votes: 0
    Watchers: 3

            Dates

              Created:
              Updated:
              Resolved: