diff --git ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java index 19172a7..fd92ddf 100644 --- ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java +++ ant/src/org/apache/hadoop/hive/ant/QTestGenTask.java @@ -82,6 +82,8 @@ public class QTestGenTask extends Task { } private List<String> templatePaths = new ArrayList<String>(); + + private String hiveRootDirectory; private String outputDirectory; @@ -157,6 +159,18 @@ public class QTestGenTask extends Task { return template; } + public void setHiveRootDirectory(File hiveRootDirectory) { + try { + this.hiveRootDirectory = hiveRootDirectory.getCanonicalPath(); + } catch (IOException ioe) { + throw new BuildException(ioe); + } + } + + public String getHiveRootDirectory() { + return hiveRootDirectory; + } + public void setTemplatePath(String templatePath) throws Exception { templatePaths.clear(); for (String relativePath : templatePath.split(",")) { @@ -261,43 +275,50 @@ public class QTestGenTask extends Task { List<File> qFiles = new ArrayList<File>(); HashMap<String, String> qFilesMap = new HashMap<String, String>(); + File hiveRootDir = null; + File queryDir = null; File outDir = null; File resultsDir = null; File logDir = null; try { - File inpDir = null; if (queryDirectory != null) { - inpDir = new File(queryDirectory); + queryDir = new File(queryDirectory); } if (queryFile != null && !queryFile.equals("")) { // The user may have passed a list of files - comma separated for (String qFile : queryFile.split(",")) { - if (null != inpDir) { - qFiles.add(new File(inpDir, qFile)); + if (null != queryDir) { + qFiles.add(new File(queryDir, qFile)); } else { qFiles.add(new File(qFile)); } } } else if (queryFileRegex != null && !queryFileRegex.equals("")) { - qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileRegexFilter(queryFileRegex)))); + qFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileRegexFilter(queryFileRegex)))); } else if (runDisabled != null && runDisabled.equals("true")) { - qFiles.addAll(Arrays.asList(inpDir.listFiles(new DisabledQFileFilter()))); + qFiles.addAll(Arrays.asList(queryDir.listFiles(new DisabledQFileFilter()))); } else { - qFiles.addAll(Arrays.asList(inpDir.listFiles(new QFileFilter()))); + qFiles.addAll(Arrays.asList(queryDir.listFiles(new QFileFilter()))); } if (excludeQueryFile != null && !excludeQueryFile.equals("")) { // Exclude specified query files, comma separated for (String qFile : excludeQueryFile.split(",")) { - if (null != inpDir) { - qFiles.remove(new File(inpDir, qFile)); + if (null != queryDir) { qFiles.remove(new File(queryDir, qFile)); } else { qFiles.remove(new File(qFile)); } } } + + hiveRootDir = new File(hiveRootDirectory); + if (!hiveRootDir.exists()) { + throw new BuildException("Hive Root Directory " + + hiveRootDir.getCanonicalPath() + " does not exist"); + } Collections.sort(qFiles); for (File qFile : qFiles) { @@ -352,6 +373,8 @@ public class QTestGenTask extends Task { // For each of the qFiles generate the test VelocityContext ctx = new VelocityContext(); ctx.put("className", className); + ctx.put("hiveRootDir", getEscapedCanonicalPath(hiveRootDir)); + ctx.put("queryDir", getEscapedCanonicalPath(queryDir)); ctx.put("qfiles", qFiles); ctx.put("qfilesMap", qFilesMap); ctx.put("resultsDir", getEscapedCanonicalPath(resultsDir)); diff --git bin/ext/beeline.sh bin/ext/beeline.sh index 4195d3d..6c0435d 100644 --- bin/ext/beeline.sh +++ bin/ext/beeline.sh @@ -18,13 +18,12 @@ THISSERVICE=beeline export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} " beeline () { - - 
CLASS=org.apache.hive.jdbc.beeline.HiveBeeline; + CLASS=org.apache.hive.beeline.BeeLine; execHiveCmd $CLASS "$@" } beeline_help () { - CLASS=org.apache.hive.jdbc.beeline.HiveBeeline; + CLASS=org.apache.hive.beeline.BeeLine; execHiveCmd $CLASS "--help" } diff --git build-common.xml build-common.xml index 92f3694..1dc9618 100644 --- build-common.xml +++ build-common.xml @@ -38,7 +38,9 @@ + + @@ -77,6 +79,7 @@ + @@ -91,13 +94,13 @@ - + - + @@ -181,6 +184,7 @@ + + @@ -226,6 +231,7 @@ + @@ -280,7 +286,7 @@ + + + + + + @@ -407,8 +419,10 @@ + + @@ -416,6 +430,7 @@ + @@ -434,11 +449,12 @@ + + - @@ -480,6 +496,23 @@ + + + + + + + + + + + + + + + + + diff --git build.properties build.properties index 4d9eed8..57233c6 100644 --- build.properties +++ build.properties @@ -84,7 +84,16 @@ test.junit.timeout=43200000 # Use this property to selectively disable tests from the command line: # ant test -Dtest.junit.exclude="**/TestCliDriver.class" # ant test -Dtest.junit.exclude="**/Test*CliDriver.class,**/TestPartitions.class" -test.junit.exclude= +test.junit.exclude="**/TestBeeLineDriver.class, **/TestHiveServer2Concurrency.class" +test.continue.on.failure=false + +test.submodule.exclude= +test.junit.maxmemory=512m + +test.concurrency.num.threads=1 +#test.beelinepositive.exclude=add_part_exist.q,alter1.q,alter2.q,alter4.q,alter5.q,alter_rename_partition.q,alter_rename_partition_authorization.q,archive.q,archive_corrupt.q,archive_multi.q,archive_mr_1806.q,archive_multi_mr_1806.q,authorization_1.q,authorization_2.q,authorization_4.q,authorization_5.q,authorization_6.q,authorization_7.q,ba_table1.q,ba_table2.q,ba_table3.q,ba_table_udfs.q,binary_table_bincolserde.q,binary_table_colserde.q,cluster.q,columnarserde_create_shortcut.q,combine2.q,constant_prop.q,create_nested_type.q,create_or_replace_view.q,create_struct_table.q,create_union_table.q,database.q,database_location.q,database_properties.q,ddltime.q,describe_database_json.q,drop_database_removes_partition_dirs.q,escape1.q,escape2.q,exim_00_nonpart_empty.q,exim_01_nonpart.q,exim_02_00_part_empty.q,exim_02_part.q,exim_03_nonpart_over_compat.q,exim_04_all_part.q,exim_04_evolved_parts.q,exim_05_some_part.q,exim_06_one_part.q,exim_07_all_part_over_nonoverlap.q,exim_08_nonpart_rename.q,exim_09_part_spec_nonoverlap.q,exim_10_external_managed.q,exim_11_managed_external.q,exim_12_external_location.q,exim_13_managed_location.q,exim_14_managed_location_over_existing.q,exim_15_external_part.q,exim_16_part_external.q,exim_17_part_managed.q,exim_18_part_external.q,exim_19_00_part_external_location.q,exim_19_part_external_location.q,exim_20_part_managed_location.q,exim_21_export_authsuccess.q,exim_22_import_exist_authsuccess.q,exim_23_import_part_authsuccess.q,exim_24_import_nonexist_authsuccess.q,global_limit.q,groupby_complex_types.q,groupby_complex_types_multi_single_reducer.q,index_auth.q,index_auto.q,index_auto_empty.q,index_bitmap.q,index_bitmap1.q,index_bitmap2.q,index_bitmap3.q,index_bitmap_auto.q,index_bitmap_rc.q,index_compact.q,index_compact_1.q,index_compact_2.q,index_compact_3.q,index_stale_partitioned.q,init_file.q,input16.q,input16_cc.q,input46.q,input_columnarserde.q,input_dynamicserde.q,input_lazyserde.q,input_testxpath3.q,input_testxpath4.q,insert2_overwrite_partitions.q,insertexternal1.q,join_thrift.q,lateral_view.q,load_binary_data.q,load_exist_part_authsuccess.q,load_nonpart_authsuccess.q,load_part_authsuccess.q,loadpart_err.q,lock1.q,lock2.q,lock3.q,lock4.q,merge_dynamic_partition.q,multi_insert.q,multi_insert_move_tasks_share_dependencies.q,null_co
lumn.q,ppd_clusterby.q,query_with_semi.q,rename_column.q,sample6.q,sample_islocalmode_hook.q,set_processor_namespaces.q,show_tables.q,source.q,split_sample.q,str_to_map.q,transform1.q,udaf_collect_set.q,udaf_context_ngrams.q,udaf_histogram_numeric.q,udaf_ngrams.q,udaf_percentile_approx.q,udf_array.q,udf_bitmap_and.q,udf_bitmap_or.q,udf_explode.q,udf_format_number.q,udf_map.q,udf_map_keys.q,udf_map_values.q,udf_max.q,udf_min.q,udf_named_struct.q,udf_percentile.q,udf_printf.q,udf_sentences.q,udf_sort_array.q,udf_split.q,udf_struct.q,udf_substr.q,udf_translate.q,udf_union.q,udf_xpath.q,udtf_stack.q,view.q,virtual_column.q + + # # Ivy Properties @@ -100,7 +109,7 @@ ivy.changingPattern=.*SNAPSHOT ivy.publish.pattern=[artifact]-[revision].[ext] ivy.artifact.retrieve.pattern=[conf]/[artifact]-[revision](-[classifier]).[ext] ivysettings.xml=${ivy.conf.dir}/ivysettings.xml -ivyresolvelog=download-only +ivyresolvelog=default ivy.mvn.repo=http://repo2.maven.org/maven2 ivy_repo_url=${ivy.mvn.repo}/org/apache/ivy/ivy/${ivy.version}/ivy-${ivy.version}.jar hive.ivy.org=org.apache.hive @@ -110,6 +119,7 @@ mvn.pom.dir=${build.dir.hive}/maven/poms mvn.license.dir=${build.dir.hive}/maven/licenses mvn.deploy.id=apache.snapshots.https mvn.deploy.url=https://repository.apache.org/content/repositories/snapshots +ivy.checkmodified=false # # unit test Properties # diff --git build.xml build.xml index ec68408..30ffa34 100644 --- build.xml +++ build.xml @@ -141,7 +141,7 @@ - + @@ -152,7 +152,7 @@ - + @@ -177,6 +177,7 @@ + @@ -319,7 +320,7 @@ - + @@ -420,6 +421,12 @@ + + + + + + @@ -427,10 +434,6 @@ - - - - @@ -490,14 +493,15 @@ + + - @@ -690,6 +694,7 @@ + @@ -914,89 +919,91 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + description="Resolve, Retrieve Ivy-managed artifacts for docs configuration"> + + + - - + + + - - + + @@ -1199,6 +1212,12 @@ + + + + + + @@ -1217,18 +1236,15 @@ - - - - - - + + + @@ -1270,6 +1286,14 @@ output.file="${mvn.pom.dir}/hive-anttasks-${version}.pom.asc" gpg.passphrase="${gpg.passphrase}"/> + + @@ -1294,6 +1318,14 @@ output.file="${mvn.pom.dir}/hive-contrib-${version}.pom.asc" gpg.passphrase="${gpg.passphrase}"/> + + @@ -1326,14 +1358,6 @@ output.file="${mvn.pom.dir}/hive-metastore-${version}.pom.asc" gpg.passphrase="${gpg.passphrase}"/> - - @@ -1342,14 +1366,6 @@ output.file="${mvn.pom.dir}/hive-pdk-${version}.pom.asc" gpg.passphrase="${gpg.passphrase}"/> - - diff --git cli/build.xml cli/build.xml index 6e70d5f..092a68b 100755 --- cli/build.xml +++ cli/build.xml @@ -40,6 +40,11 @@ to call at top-level: ant deploy-contrib compile-core-test + + + + + diff --git cli/ivy.xml cli/ivy.xml index 0d1c64a..4bf543e 100644 --- cli/ivy.xml +++ cli/ivy.xml @@ -30,6 +30,7 @@ + diff --git common/build.xml common/build.xml index 24ad8f5..731f26e 100755 --- common/build.xml +++ common/build.xml @@ -29,6 +29,11 @@ to call at top-level: ant deploy-contrib compile-core-test + + + + + restrictList = new ArrayList(); private static final Map vars = new HashMap(); static { @@ -652,6 +654,22 @@ public class HiveConf extends Configuration { HIVE_DDL_OUTPUT_FORMAT("hive.ddl.output.format", null), HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"), + HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5), + 
HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100), + + HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000), + HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""), + + + // HiveServer2 auth configuration + HIVE_SERVER2_AUTHENTICATION("hive.server2.authentication", "NONE"), + HIVE_SERVER2_KERBEROS_KEYTAB("hive.server2.authentication.kerberos.keytab", ""), + HIVE_SERVER2_KERBEROS_PRINCIPAL("hive.server2.authentication.kerberos.principal", ""), + HIVE_SERVER2_PLAIN_LDAP_URL("hive.server2.authentication.ldap.url", null), + HIVE_SERVER2_PLAIN_LDAP_BASEDN("hive.server2.authentication.ldap.baseDN", null), + + HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", null), + // If this is set all move tasks at the end of a multi-insert query will only begin once all // outputs are ready HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES( @@ -823,6 +841,13 @@ public class HiveConf extends Configuration { return new LoopingByteArrayInputStream(confVarByteArray); } + public void verifyAndSet(String name, String value) throws IllegalArgumentException { + if (restrictList.contains(name)) { + throw new IllegalArgumentException("Cannot modify " + name + " at runtime"); + } + set(name, value); + } + public static int getIntVar(Configuration conf, ConfVars var) { assert (var.valClass == Integer.class); return conf.getInt(var.varname, var.defaultIntVal); @@ -1010,8 +1035,18 @@ public class HiveConf extends Configuration { if (auxJars == null) { auxJars = this.get(ConfVars.HIVEAUXJARS.varname); } + + // set up the list of conf vars that are not allowed to be changed at runtime + String restrictListStr = this.get(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString()); + if (restrictListStr != null) { + for (String entry : restrictListStr.split(",")) { + restrictList.add(entry); + } + } + restrictList.add(ConfVars.HIVE_CONF_RESTRICTED_LIST.toString()); } + /** * Apply system properties to this object if the property name is defined in ConfVars * and the value is non-null and not an empty string. diff --git conf/hive-default.xml.template conf/hive-default.xml.template index 91d3bf4..0b8a3c3 100644 --- conf/hive-default.xml.template +++ conf/hive-default.xml.template @@ -1546,7 +1546,6 @@ The number of milliseconds between HMSHandler retry attempts - hive.server.read.socket.timeout 10 @@ -1560,10 +1559,81 @@ - hive.decode.partition.name - false - Whether to show the unquoted partition names in query results. + hive.server2.thrift.min.worker.threads + 5 + Minimum number of Thrift worker threads + + + + hive.server2.thrift.max.worker.threads + 100 + Maximum number of Thrift worker threads + + + + hive.server2.thrift.port + 10000 + Port number of HiveServer2 Thrift interface. + Can be overridden by setting $HIVE_SERVER2_THRIFT_PORT + + + + hive.server2.thrift.bind.host + localhost + Bind host on which to run the HiveServer2 Thrift interface. + Can be overridden by setting $HIVE_SERVER2_THRIFT_BIND_HOST + + + + hive.server2.authentication + NONE + + Client authentication types. + NONE: no authentication check + LDAP: LDAP/AD based authentication + KERBEROS: Kerberos/GSSAPI authentication + + + + + hive.server2.authentication.kerberos.principal + + + Kerberos server principal + + + + + hive.server2.authentication.kerberos.keytab + + + Kerberos keytab file for server principal + + + + + hive.server2.authentication.ldap.url + + + LDAP connection URL + + + + + + hive.server2.authentication.ldap.baseDN + + + LDAP base DN + + + + + hive.decode.partition.name + false + Whether to show the unquoted partition names in query results. + 
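For reference, a minimal sketch (illustrative only, not part of this patch) of how the new HiveServer2 settings can be read back through the ConfVars entries added above; the environment-variable fallback mirrors the "Can be overridden by setting $HIVE_SERVER2_THRIFT_PORT" note in the template, and the class name is a hypothetical example:

// Sketch: reading the HiveServer2 Thrift settings introduced in this patch.
import org.apache.hadoop.hive.conf.HiveConf;

public class HiveServer2ConfSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Env var takes precedence over the configured port, per the template note.
    String portEnv = System.getenv("HIVE_SERVER2_THRIFT_PORT");
    int port = (portEnv != null && !portEnv.isEmpty())
        ? Integer.parseInt(portEnv)
        : conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT);
    String host = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
    int minThreads = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS);
    int maxThreads = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS);
    System.out.println("HiveServer2 Thrift endpoint " + host + ":" + port
        + " (worker threads " + minThreads + ".." + maxThreads + ")");
  }
}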
diff --git contrib/build.xml contrib/build.xml index 277c985..5d33d83 100644 --- contrib/build.xml +++ contrib/build.xml @@ -49,7 +49,8 @@ - --> - - + diff --git eclipse-templates/.classpath eclipse-templates/.classpath index f5a1ceb..8cc34cf 100644 --- eclipse-templates/.classpath +++ eclipse-templates/.classpath @@ -40,7 +40,6 @@ - @@ -49,10 +48,10 @@ - + @@ -86,20 +85,32 @@ + + + + + + + + + + + + @@ -110,17 +121,8 @@ - - - - - - - - - - + diff --git hbase-handler/build.xml hbase-handler/build.xml index 8676ca3..8e23a09 100644 --- hbase-handler/build.xml +++ hbase-handler/build.xml @@ -47,7 +47,8 @@ - - - + diff --git ivy/ivysettings.xml ivy/ivysettings.xml index 0539c3a..5767121 100644 --- ivy/ivysettings.xml +++ ivy/ivysettings.xml @@ -61,7 +61,7 @@ diff --git ivy/libraries.properties ivy/libraries.properties index 8f4949b..ee7e1c0 100644 --- ivy/libraries.properties +++ ivy/libraries.properties @@ -37,6 +37,7 @@ commons-compress.version=1.4.1 commons-configuration.version=1.6 commons-dbcp.version=1.4 commons-httpclient.version=3.0.1 +commons-io.version=2.4 commons-lang.version=2.4 commons-logging.version=1.0.4 commons-logging-api.version=1.0.4 @@ -51,8 +52,6 @@ jdo-api.version=2.3-ec jdom.version=1.1 jetty.version=6.1.26 jline.version=0.9.94 -sqlline.version=1_0_2 -sqlline.branch=1.0.2 json.version=20090211 junit.version=4.10 libfb303.version=0.9.0 @@ -63,6 +62,7 @@ mockito-all.version=1.8.2 rat.version=0.8 slf4j-api.version=1.6.1 slf4j-log4j12.version=1.6.1 +tempus-fugit.version=1.1 velocity.version=1.5 zookeeper.version=3.4.3 javolution.version=5.5.1 diff --git jdbc/ivy.xml jdbc/ivy.xml index 29777a3..9269bd1 100644 --- jdbc/ivy.xml +++ jdbc/ivy.xml @@ -33,8 +33,6 @@ transitive="false"/> - diff --git metastore/build.xml metastore/build.xml index 9e60b66..0e94611 100755 --- metastore/build.xml +++ metastore/build.xml @@ -22,15 +22,6 @@ - - - You must set the 'thrift.home' property! Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/hive_metastore.thrift - - - - - diff --git ql/build.xml ql/build.xml index 207cb5b..5b8c39b 100644 --- ql/build.xml +++ ql/build.xml @@ -27,26 +27,22 @@ + + - - - You must set the 'thrift.home' property! - Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/queryplan.thrift - - - - - + + - - - + hadoopVersion="${hadoopVersion}"/> + + - - - Building Grammar ${src.dir}/org/apache/hadoop/hive/ql/parse/Hive.g .... 
- - - + + + diff --git ql/ivy.xml ql/ivy.xml index a5dcf6c..a4a79ac 100644 --- ql/ivy.xml +++ ql/ivy.xml @@ -36,6 +36,8 @@ conf="test->default" transitive="false"/> + diff --git ql/src/java/org/apache/hadoop/hive/ql/Driver.java ql/src/java/org/apache/hadoop/hive/ql/Driver.java index b29e444..0976d1a 100644 --- ql/src/java/org/apache/hadoop/hive/ql/Driver.java +++ ql/src/java/org/apache/hadoop/hive/ql/Driver.java @@ -112,6 +112,8 @@ public class Driver implements CommandProcessor { static final private Log LOG = LogFactory.getLog(Driver.class.getName()); static final private LogHelper console = new LogHelper(LOG); + private static final Object compileMonitor = new Object(); + private int maxRows = 100; ByteStream.Output bos = new ByteStream.Output(); @@ -890,7 +892,10 @@ public class Driver implements CommandProcessor { perfLogger.PerfLogBegin(LOG, PerfLogger.DRIVER_RUN); perfLogger.PerfLogBegin(LOG, PerfLogger.TIME_TO_SUBMIT); - int ret = compile(command); + int ret; + synchronized (compileMonitor) { + ret = compile(command); + } if (ret != 0) { releaseLocks(ctx.getHiveLocks()); return new CommandProcessorResponse(ret, errorMessage, SQLState); diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java index 9391acd..23bebc5 100644 --- ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java +++ ql/src/java/org/apache/hadoop/hive/ql/exec/CopyTask.java @@ -72,13 +72,12 @@ public class CopyTask extends Task implements Serializable { } if (!dstFs.mkdirs(toPath)) { - console - .printError("Cannot make target directory: " + toPath.toString()); + console.printError("Cannot make target directory: " + toPath.toString()); return 2; } for (FileStatus oneSrc : srcs) { - System.out.println("Copying file: " + oneSrc.getPath().toString()); + console.printInfo("Copying file: " + oneSrc.getPath().toString()); LOG.debug("Copying file: " + oneSrc.getPath().toString()); if (!FileUtil.copy(srcFs, oneSrc.getPath(), dstFs, toPath, false, // delete // source diff --git ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java index 64e2c13..912c4ad 100644 --- ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java +++ ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java @@ -116,15 +116,27 @@ public class SetProcessor implements CommandProcessor { return new CommandProcessorResponse(0); } else if (varname.startsWith(SetProcessor.HIVECONF_PREFIX)){ String propName = varname.substring(SetProcessor.HIVECONF_PREFIX.length()); - String error = setConf(varname, propName, varvalue, false); - return new CommandProcessorResponse(error == null ? 0 : 1, error, null); + try { + ss.getConf().verifyAndSet(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue)); + return new CommandProcessorResponse(0); + } catch (IllegalArgumentException e) { + ss.out.println(e.getMessage()); + return new CommandProcessorResponse(-1, e.getMessage(), "42000"); + } } else if (varname.startsWith(SetProcessor.HIVEVAR_PREFIX)) { String propName = varname.substring(SetProcessor.HIVEVAR_PREFIX.length()); ss.getHiveVariables().put(propName, new VariableSubstitution().substitute(ss.getConf(),varvalue)); return new CommandProcessorResponse(0); } else { - String error = setConf(varname, varname, varvalue, true); - return new CommandProcessorResponse(error == null ? 
0 : 1, error, null); + String substitutedValue = new VariableSubstitution().substitute(ss.getConf(),varvalue); + try { + ss.getConf().verifyAndSet(varname, substitutedValue); + ss.getOverriddenConfigurations().put(varname, substitutedValue); + return new CommandProcessorResponse(0); + } catch (IllegalArgumentException e) { + ss.out.println(e.getMessage()); + return new CommandProcessorResponse(-1, e.getMessage(), "42000"); + } } } @@ -209,7 +221,7 @@ } } else { dumpOption(varname); - return new CommandProcessorResponse(0); + return new CommandProcessorResponse(0, null, null, getSchema()); } }
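A short usage sketch (illustrative only, not part of the patch) of the restricted-list behavior wired up above: HiveConf.verifyAndSet(), which SetProcessor now calls for every "set" command, rejects any variable named in hive.conf.restricted.list, and initialize() unconditionally adds the restricted list itself to restrictList, so it can never be overridden at runtime. The class name is hypothetical:

// Sketch: exercising HiveConf.verifyAndSet() as added in this patch.
import org.apache.hadoop.hive.conf.HiveConf;

public class RestrictedListSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // An unrestricted variable is substituted and set as before.
    conf.verifyAndSet("hive.exec.scratchdir", "/tmp/hive-demo");
    try {
      // The restricted list itself is always protected by initialize().
      conf.verifyAndSet("hive.conf.restricted.list", "hive.server2.authentication");
    } catch (IllegalArgumentException e) {
      // Expected: "Cannot modify hive.conf.restricted.list at runtime";
      // SetProcessor turns this into SQLState 42000 for the client.
      System.out.println(e.getMessage());
    }
  }
}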
diff --git ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java index ae766f7..e6840a4 100644 --- ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java +++ ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java @@ -172,17 +172,14 @@ public class QTestUtil { normalizeNames(file); } } else { - // System.out.println("Trying to match: " + path.getPath()); Matcher m = reduceTok.matcher(path.getName()); if (m.matches()) { String name = m.group(1) + "reduce" + m.group(3); - // System.out.println("Matched new name: " + name); path.renameTo(new File(path.getParent(), name)); } else { m = mapTok.matcher(path.getName()); if (m.matches()) { String name = m.group(1) + "map_" + m.group(3); - // System.out.println("Matched new name: " + name); path.renameTo(new File(path.getParent(), name)); } } @@ -193,6 +190,14 @@ public class QTestUtil { this(outDir, logDir, false, "0.20"); } + public String getOutputDirectory() { + return outDir; + } + + public String getLogDirectory() { + return logDir; + } + private String getHadoopMainVersion(String input) { if (input == null) { return null; diff --git serde/build.xml serde/build.xml index 5f11529..a2c23d1 100644 --- serde/build.xml +++ serde/build.xml @@ -51,27 +51,6 @@ - - - You must set the 'thrift.home' property! - Executing ${thrift.home}/bin/thrift to build java serde Constants... - - - - Executing ${thrift.home}/bin/thrift to build complex.thrift test classes... - - - - Executing ${thrift.home}/bin/thrift to build testthrift.thrift classes... - - - - Executing ${thrift.home}/bin/thrift to build megastruct.thrift classes... - - - - - Generating data/files/complex.seq... diff --git service/build.xml service/build.xml index 19bdb9f..cb0beb4 100644 --- service/build.xml +++ service/build.xml @@ -22,15 +22,6 @@ - - - You must set the 'thrift.home' property! - Executing ${thrift.home}/bin/thrift on ${ant.project.name}/if/hive_service.thrift - - - - - remoteUser = new ThreadLocal<String>() { + @Override + protected synchronized String initialValue() { + return null; + } + }; + + @Override + public String getRemoteUser() { + return remoteUser.get(); + } + /** CallbackHandler for SASL DIGEST-MD5 mechanism */ // This code is pretty much completely based on Hadoop's // SaslRpcServer.SaslDigestCallbackHandler - the only reason we could not @@ -479,12 +510,15 @@ import org.apache.thrift.transport.TTransportFactory; * * This is used on the server side to set the UGI for each specific call. */ - private class TUGIAssumingProcessor implements TProcessor { + protected class TUGIAssumingProcessor implements TProcessor { final TProcessor wrapped; DelegationTokenSecretManager secretManager; - TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager) { + boolean useProxy; + TUGIAssumingProcessor(TProcessor wrapped, DelegationTokenSecretManager secretManager, + boolean useProxy) { this.wrapped = wrapped; this.secretManager = secretManager; + this.useProxy = useProxy; } public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException { @@ -513,17 +547,22 @@ remoteAddress.set(socket.getInetAddress()); UserGroupInformation clientUgi = null; try { - clientUgi = UserGroupInformation.createProxyUser( - endUser, UserGroupInformation.getLoginUser()); - return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() { - public Boolean run() { - try { - return wrapped.process(inProt, outProt); - } catch (TException te) { - throw new RuntimeException(te); + if (useProxy) { + clientUgi = UserGroupInformation.createProxyUser( + endUser, UserGroupInformation.getLoginUser()); + return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() { + public Boolean run() { + try { + return wrapped.process(inProt, outProt); + } catch (TException te) { + throw new RuntimeException(te); + } } - } - }); + }); + } else { + remoteUser.set(endUser); + return wrapped.process(inProt, outProt); + } } catch (RuntimeException rte) { if (rte.getCause() instanceof TException) { throw (TException)rte.getCause(); diff --git shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java index ecaa2d7..9b0ec0a 100644 --- shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java +++ shims/src/common/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java @@ -37,6 +37,11 @@ import org.apache.thrift.transport.TTransportFactory; "The current version of Hadoop does not support Authentication"); } + public Client createClientWithConf(String authType) { + throw new UnsupportedOperationException( + "The current version of Hadoop does not support Authentication"); + } + public Server createServer(String keytabFile, String principalConf) throws TTransportException { throw new UnsupportedOperationException( @@ -67,7 +72,9 @@ import org.apache.thrift.transport.TTransportFactory; public static abstract class Server { public abstract TTransportFactory createTransportFactory() throws TTransportException; public abstract TProcessor wrapProcessor(TProcessor processor); + public abstract TProcessor wrapNonAssumingProcessor(TProcessor processor); public abstract InetAddress getRemoteAddress(); + public abstract String getRemoteUser(); public abstract void startDelegationTokenSecretManager(Configuration conf) throws IOException; public abstract String getDelegationToken(String owner, String renewer) throws IOException, InterruptedException;
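Finally, a hedged sketch of how a HiveServer2-style service could use the bridge API extended above: wrapNonAssumingProcessor() authenticates the Thrift caller via SASL but, unlike wrapProcessor(), does not proxy as that user; the service can instead query getRemoteUser() per call, which the non-proxy branch of TUGIAssumingProcessor populates via the thread-local shown above. The keytab path, principal, and class name are placeholders, not values from this patch:

// Sketch: wrapping a Thrift processor with the extended auth bridge API.
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge;
import org.apache.thrift.TProcessor;

public class SecureProcessorSketch {
  public static TProcessor wrap(TProcessor processor) throws Exception {
    HadoopThriftAuthBridge.Server server =
        ShimLoader.getHadoopThriftAuthBridge().createServer(
            "/etc/hive/conf/hive.keytab",  // placeholder keytab path
            "hive/_HOST@EXAMPLE.COM");     // placeholder Kerberos principal
    // Authenticate callers without assuming their identity; a handler can
    // later call server.getRemoteUser() to learn who made the current call.
    return server.wrapNonAssumingProcessor(processor);
  }
}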