commit 902f0ead06147a8ec55ee12fa3da61984575af83
Author: Janos Gub
Date:   Tue May 9 16:16:43 2017 +0200

    HIVE-16617 Clean up javadoc from errors in module hive-shims

diff --git shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 9c6901d..c280d49 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -168,7 +168,6 @@ MiniDFSShim getMiniDfs(Configuration conf,
    * All updates to jobtracker/resource manager rpc address
    * in the configuration should be done through this shim
    * @param conf
-   * @return
    */
   public void setJobLauncherRpcAddress(Configuration conf, String val);
@@ -252,12 +251,12 @@ RecordReader getRecordReader(JobConf job, CombineFileSplit split, Reporter repor
   /**
    * For the block locations returned by getLocations() convert them into a Treemap
-   * <Offset,blockLocation> by iterating over the list of blockLocation.
+   * &lt;Offset,blockLocation&gt; by iterating over the list of blockLocation.
    * Using TreeMap from offset to blockLocation, makes it O(logn) to get a particular
    * block based upon offset.
    * @param fs the file system
    * @param status the file information
-   * @return TreeMap<Long, BlockLocation>
+   * @return TreeMap&lt;Long, BlockLocation&gt;
    * @throws IOException
    */
  TreeMap<Long, BlockLocation> getLocationsWithOffset(FileSystem fs,
diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index d420d09..fd86fed 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -169,9 +169,9 @@ private boolean loginUserHasCurrentAuthMethod(UserGroupInformation ugi, String s
   /**
    * Create a client-side SASL transport that wraps an underlying transport.
    *
-   * @param method The authentication method to use. Currently only KERBEROS is
+   * @param methodStr The authentication method to use. Currently only KERBEROS is
    * supported.
-   * @param serverPrincipal The Kerberos principal of the target server.
+   * @param principalConfig The Kerberos principal of the target server.
    * @param underlyingTransport The underlying transport mechanism, usually a TSocket.
    * @param saslProps the sasl properties to create the client with
    */
diff --git shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
index 4d910d8..4719b85 100644
--- shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
+++ shims/common/src/main/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
@@ -184,7 +184,7 @@ public static void decodeWritable(Writable w, String idStr) throws IOException {
   /**
    * Synchronize master key updates / sequence generation for multiple nodes.
-   * NOTE: {@Link AbstractDelegationTokenSecretManager} keeps currentKey private, so we need
+   * NOTE: {@link AbstractDelegationTokenSecretManager} keeps currentKey private, so we need
    * to utilize this "hook" to manipulate the key through the object reference.
    * This .20S workaround should cease to exist when Hadoop supports token store.
    */
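
Note on the getLocationsWithOffset() javadoc touched above: it describes indexing block locations in a TreeMap keyed by offset so a block can be found in O(log n). The following is a minimal sketch of that technique only, not the shim implementation; the class and method names (BlockLocationIndex, buildOffsetTree, findBlock) and the use of floorEntry are assumptions for illustration.

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

// Sketch of the offset -> BlockLocation TreeMap idea described in the javadoc.
// Names are hypothetical; this is not part of the HadoopShims API.
public class BlockLocationIndex {

  // Index each block by its starting offset so lookups by offset are O(log n).
  public static TreeMap<Long, BlockLocation> buildOffsetTree(FileSystem fs, FileStatus status)
      throws IOException {
    TreeMap<Long, BlockLocation> tree = new TreeMap<>();
    BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());
    for (BlockLocation location : locations) {
      tree.put(location.getOffset(), location);
    }
    return tree;
  }

  // floorEntry() returns the entry with the greatest start offset <= the given offset,
  // i.e. the block that covers that byte position (or null if none does).
  public static BlockLocation findBlock(TreeMap<Long, BlockLocation> tree, long offset) {
    Map.Entry<Long, BlockLocation> entry = tree.floorEntry(offset);
    return entry == null ? null : entry.getValue();
  }
}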