commit c74b2bbcd1ddf1091fb3ef6ce300644d3c304cdf
Author: Janos Gub
Date:   Tue May 9 16:41:20 2017 +0200

    HIVE-16618 Clean up javadoc from errors in module hive-common

diff --git common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
index d26207d..c4f2297 100644
--- common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/CompressionUtils.java
@@ -112,8 +112,8 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile)
   * The output file is created in the output folder, having the same name as the input file, minus
   * the '.tar' extension.
   *
-  * @param inputFile the input .tar file
-  * @param outputDir the output directory file.
+  * @param inputFileName the input .tar file
+  * @param outputDirName the output directory file.
   * @throws IOException
   * @throws FileNotFoundException
   *
@@ -131,8 +131,8 @@ public static void zip(String parentDir, String[] inputFiles, String outputFile)
   * The output file is created in the output folder, having the same name as the input file, minus
   * the '.tar' extension.
   *
-  * @param inputFile the input .tar file
-  * @param outputDir the output directory file.
+  * @param inputFileName the input .tar file
+  * @param outputDirName the output directory file.
   * @throws IOException
   * @throws FileNotFoundException
   *
diff --git common/src/java/org/apache/hadoop/hive/common/JavaUtils.java common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 3916fe3..b224d26 100644
--- common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -138,14 +138,14 @@ public static void closeClassLoader(ClassLoader loader) throws IOException {
   /**
    * Utility method for ACID to normalize logging info. Matches
-   * {@link org.apache.hadoop.hive.metastore.api.LockRequest#toString()}
+   * org.apache.hadoop.hive.metastore.api.LockRequest#toString
    */
   public static String lockIdToString(long extLockId) {
     return "lockid:" + extLockId;
   }
   /**
    * Utility method for ACID to normalize logging info. Matches
-   * {@link org.apache.hadoop.hive.metastore.api.LockResponse#toString()}
+   * org.apache.hadoop.hive.metastore.api.LockResponse#toString
    */
   public static String txnIdToString(long txnId) {
     return "txnid:" + txnId;
diff --git common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index a9e17c2..2387407 100644
--- common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -110,18 +110,18 @@ public String getAggregator(Configuration conf) {
   public static final String STATS_FILE_PREFIX = "tmpstats-";

   /**
-   * @return List of all supported statistics
+   * List of all supported statistics
    */
   public static final String[] supportedStats = {NUM_FILES,ROW_COUNT,TOTAL_SIZE,RAW_DATA_SIZE};

   /**
-   * @return List of all statistics that need to be collected during query execution. These are
+   * List of all statistics that need to be collected during query execution. These are
    * statistics that inherently require a scan of the data.
    */
   public static final String[] statsRequireCompute = new String[] {ROW_COUNT,RAW_DATA_SIZE};

   /**
-   * @return List of statistics that can be collected quickly without requiring a scan of the data.
+   * List of statistics that can be collected quickly without requiring a scan of the data.
    */
   public static final String[] fastStats = new String[] {NUM_FILES,TOTAL_SIZE};
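Illustration (not part of the patch above): the three String[] constants split the supported statistics into those that require reading the data and those that come straight from file metadata. A minimal sketch of how a caller might use them, assuming only what the hunk shows (the arrays plus the NUM_FILES/ROW_COUNT/TOTAL_SIZE/RAW_DATA_SIZE constants on StatsSetupConst):

    import java.util.Arrays;

    import org.apache.hadoop.hive.common.StatsSetupConst;

    public class StatsKindCheck {
      /** Returns true when the statistic can only be produced by scanning the data. */
      static boolean needsScan(String statName) {
        return Arrays.asList(StatsSetupConst.statsRequireCompute).contains(statName);
      }

      public static void main(String[] args) {
        // ROW_COUNT requires a scan; TOTAL_SIZE is a "fast" stat taken from file metadata.
        System.out.println(StatsSetupConst.ROW_COUNT + " needs scan: "
            + needsScan(StatsSetupConst.ROW_COUNT));   // true
        System.out.println(StatsSetupConst.TOTAL_SIZE + " needs scan: "
            + needsScan(StatsSetupConst.TOTAL_SIZE));  // false
      }
    }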
diff --git common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
index 8f55354..c022577 100644
--- common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
+++ common/src/java/org/apache/hadoop/hive/common/ValidCompactorTxnList.java
@@ -32,7 +32,7 @@
  * open transaction when choosing which files to compact, but that it still ignores aborted
  * records when compacting.
  *
- * See {@link org.apache.hadoop.hive.metastore.txn.TxnUtils#createValidCompactTxnList()} for proper
+ * See org.apache.hadoop.hive.metastore.txn.TxnUtils#createValidCompactTxnList() for proper
  * way to construct this.
  */
 public class ValidCompactorTxnList extends ValidReadTxnList {
@@ -70,7 +70,7 @@ public ValidCompactorTxnList(String value) {
     super(value);
   }
   /**
-   * Returns {@link org.apache.hadoop.hive.common.ValidTxnList.RangeResponse.ALL} if all txns in
+   * Returns org.apache.hadoop.hive.common.ValidTxnList.RangeResponse.ALL if all txns in
    * the range are resolved and RangeResponse.NONE otherwise
    */
   @Override
diff --git common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
index 5883b01..f1c3946 100644
--- common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
+++ common/src/java/org/apache/hadoop/hive/common/classification/RetrySemantics.java
@@ -28,7 +28,6 @@
 * Initially meant for Metastore API when made across a network, i.e. asynchronously where
 * the response may not reach the caller and thus it cannot know if the operation was actually
 * performed on the server.
- * @see RetryingMetastoreClient
 */
@InterfaceStability.Evolving
@InterfaceAudience.LimitedPrivate("Hive developer")
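Illustration (not part of the patch above): RetrySemantics-style annotations are placed on remote metastore calls so the retry layer knows whether a failed call can be safely re-issued. The nested annotation name used below (Idempotent) and the method are assumptions for this sketch; they are not visible in the hunk.

    import org.apache.hadoop.hive.common.classification.RetrySemantics;

    /** Hypothetical remote API used only to show where the annotation goes. */
    public interface ExampleMetastoreCalls {
      // Safe to retry: asking for the state of the same lock twice has no extra effect.
      @RetrySemantics.Idempotent
      String getLockState(long lockId);
    }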

diff --git common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
index 5a991ec..cc2ca6e 100644
--- common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
+++ common/src/java/org/apache/hadoop/hive/common/cli/CommonCliOptions.java
@@ -35,8 +35,8 @@
 * all your own options or processing instructions), parse, and then use
 * the resulting information.
 *
- * See {@link org.apache.hadoop.hive.service.HiveServer} or
- * {@link org.apache.hadoop.hive.metastore.HiveMetaStore}
+ * See org.apache.hadoop.hive.service.HiveServer or
+ * org.apache.hadoop.hive.metastore.HiveMetaStore
 * for examples of use.
 *
 */
diff --git common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
index 8cf6608..7fd8eda 100644
--- common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
+++ common/src/java/org/apache/hadoop/hive/common/metrics/common/MetricsVariable.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hive.common.metrics.common;

 /**
- * Interface for metrics variables. For example a the database service could expose the number of
+ * Interface for metrics variables. For example a the database service could expose the number of
  * currently active connections.
  */
 public interface MetricsVariable {
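Illustration (not part of the patch above): a gauge matching the example in the javadoc, reporting the number of currently active connections. It assumes MetricsVariable exposes a single getValue() accessor and a type parameter; neither is visible in the hunk, so treat both as assumptions of this sketch.

    import java.util.concurrent.atomic.AtomicInteger;

    import org.apache.hadoop.hive.common.metrics.common.MetricsVariable;

    public class ActiveConnectionsGauge implements MetricsVariable<Integer> {
      private final AtomicInteger active = new AtomicInteger();

      public void connectionOpened() { active.incrementAndGet(); }
      public void connectionClosed() { active.decrementAndGet(); }

      @Override
      public Integer getValue() {
        // Assumed accessor: the metrics system polls this to read the current value.
        return active.get();
      }
    }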

diff --git common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
index 3b3e918..9face96 100644
--- common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
+++ common/src/java/org/apache/hadoop/hive/common/type/Decimal128.java
@@ -1316,15 +1316,13 @@ public void divideDestructive(Decimal128 right, short newScale) {
    * x - IntegerPart(x / p, resultScale) * p
    *
    *
-   * @left
+   * @param left
    *          is x
-   * @right
+   * @param right
    *          is p
-   * @result
+   * @param result
    *          receives the result
-   * @scratch
-   *          scratch space to avoid need to create a new object
-   * @scale
+   * @param scale
    *          scale of result
    */
   public static void modulo(Decimal128 left, Decimal128 right, Decimal128 result,
@@ -1856,7 +1854,7 @@ public void setNullDataValue() {
   /**
    * Update the value to a decimal value with the decimal point equal to
    * val but with the decimal point inserted scale
-   * digits from the right. Behavior is undefined if scale is > 38 or < 0.
+   * digits from the right. Behavior is undefined if scale is > 38 or < 0.
    *
    * For example, updateFixedPoint(123456789L, (short) 3) changes the target
    * to the value 123456.789 with scale 3.
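Worked illustration (not part of the patch above): the formula documented for modulo is x - IntegerPart(x / p) * p at a chosen result scale. The sketch below mirrors that formula with java.math.BigDecimal purely for readability; it does not use Decimal128 itself, so the helper name here is not Hive API.

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class ModuloFormulaDemo {
      /** Mirrors the javadoc formula: x - IntegerPart(x / p) * p, at the given result scale. */
      static BigDecimal modulo(BigDecimal x, BigDecimal p, int scale) {
        BigDecimal integerPart = x.divide(p, 0, RoundingMode.DOWN); // IntegerPart(x / p)
        return x.subtract(integerPart.multiply(p)).setScale(scale, RoundingMode.UNNECESSARY);
      }

      public static void main(String[] args) {
        // 7.9 mod 2.5 = 7.9 - 3 * 2.5 = 0.4
        System.out.println(modulo(new BigDecimal("7.9"), new BigDecimal("2.5"), 1)); // 0.4
      }
    }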
diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
index dc02803..9084fed 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConfUtil.java
@@ -151,6 +151,7 @@ public int compare(Map.Entry ent, Map.Entry ent2
 * password is through a file which stores the password in clear-text which needs to be readable
 * by all the consumers and therefore is not supported.
 *
+ *
 *
 • If HIVE_SERVER2_JOB_CREDENTIAL_PROVIDER_PATH is set in the hive configuration this method
 * overrides the MR job configuration property hadoop.security.credential.provider.path with its
 * value. If not set then it does not change the value of hadoop.security.credential.provider.path
@@ -161,7 +162,7 @@ public int compare(Map.Entry ent, Map.Entry ent2
 * (2) If password is not set using (1) above we use HADOOP_CREDSTORE_PASSWORD if it is set.
 * (3) If none of those are set, we do not set any password in the MR task environment. In this
 * case the hadoop credential provider should use the default password of "none" automatically
- *
 * @param jobConf - job specific configuration
 */
 public static void updateJobCredentialProviders(Configuration jobConf) {
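Usage illustration (not part of the patch above): the method described here is typically called on the job-specific Configuration before the MR job is handed off, so the task side sees the credential provider path and credstore password rules spelled out in the javadoc. A minimal sketch, assuming only the signature shown in the hunk:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.conf.HiveConfUtil;

    public class CredentialProviderWiring {
      public static void main(String[] args) {
        // Job-specific configuration that will be handed to the MR job.
        Configuration jobConf = new Configuration();

        // If HIVE_SERVER2_JOB_CREDENTIAL_PROVIDER_PATH is set in the Hive configuration,
        // this call overrides hadoop.security.credential.provider.path in jobConf and
        // arranges the credstore password for the task environment as described above.
        HiveConfUtil.updateJobCredentialProviders(jobConf);
      }
    }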
diff --git common/src/java/org/apache/hive/common/util/HiveStringUtils.java common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index f6dc73a..4a6413a 100644
--- common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -426,7 +426,7 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat,
   /**
    * Splits a comma separated value String, trimming leading and trailing whitespace on each value.
-   * @param str a comma separated with values
+   * @param str a comma separated String with values
    * @return a Collection of String values
    */
   public static Collection getTrimmedStringCollection(String str){
@@ -436,7 +436,7 @@ public static String getFormattedTimeWithDiff(DateFormat dateFormat,
   /**
    * Splits a comma separated value String, trimming leading and trailing whitespace on each value.
-   * @param str a comma separated with values
+   * @param str a comma separated String with values
    * @return an array of String values
    */
   public static String[] getTrimmedStrings(String str){
diff --git common/src/java/org/apache/hive/common/util/ShutdownHookManager.java common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
index 6585e3b..0b11d10 100644
--- common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
+++ common/src/java/org/apache/hive/common/util/ShutdownHookManager.java
@@ -89,7 +89,7 @@ public static boolean removeShutdownHook(Runnable shutdownHook) {
   /**
    * register file to delete-on-exit hook
    *
-   * @see {@link org.apache.hadoop.hive.common.FileUtils#createTempFile}
+   * {@link org.apache.hadoop.hive.common.FileUtils#createTempFile}
    */
   public static void deleteOnExit(File file) {
     if (MGR.isShutdownInProgress()) {
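Usage illustration (not part of the patch above): the documented behavior of getTrimmedStrings is a comma split with surrounding whitespace removed from each value, as in this small sketch:

    import java.util.Arrays;

    import org.apache.hive.common.util.HiveStringUtils;

    public class TrimmedSplitDemo {
      public static void main(String[] args) {
        // Splits on commas and trims the whitespace around each value.
        String[] values = HiveStringUtils.getTrimmedStrings(" alpha ,beta,  gamma ");
        System.out.println(Arrays.toString(values)); // expected: [alpha, beta, gamma]
      }
    }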

diff --git common/src/java/org/apache/hive/http/HttpServer.java common/src/java/org/apache/hive/http/HttpServer.java
index fd3d457..0bc0032 100644
--- common/src/java/org/apache/hive/http/HttpServer.java
+++ common/src/java/org/apache/hive/http/HttpServer.java
@@ -216,13 +216,15 @@ public int getPort() {
   /**
    * Checks the user has privileges to access to instrumentation servlets.
-   *
+   *
    * If hadoop.security.instrumentation.requires.admin is set to FALSE
    * (default value) it always returns TRUE.
-   *
+   *
+   *
    * If hadoop.security.instrumentation.requires.admin is set to TRUE
    * it will check if the current user is in the admin ACLS. If the user is
    * in the admin ACLs it returns TRUE, otherwise it returns FALSE.
+   *
    *
    * @param servletContext the servlet context.
    * @param request the servlet request.
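Illustration (not part of the patch above): the access rule described in this javadoc boils down to the decision below. The ACL check is a stand-in, since the real method name and ACL plumbing in HttpServer are not shown in the hunk.

    import org.apache.hadoop.conf.Configuration;

    public class InstrumentationAccessSketch {
      /**
       * Mirrors the documented rule: when the flag is false (the default) access is
       * always allowed; when it is true, only users in the admin ACLs may reach the
       * instrumentation servlets.
       */
      static boolean instrumentationAccessAllowed(Configuration conf, String user) {
        boolean requiresAdmin =
            conf.getBoolean("hadoop.security.instrumentation.requires.admin", false);
        if (!requiresAdmin) {
          return true;
        }
        return isUserInAdminAcl(user); // stand-in for the real ACL check
      }

      private static boolean isUserInAdminAcl(String user) {
        // Placeholder: the real implementation consults the configured admin ACLs.
        return "admin".equals(user);
      }
    }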

diff --git common/src/java/org/apache/hive/http/JMXJsonServlet.java common/src/java/org/apache/hive/http/JMXJsonServlet.java
index 7535b26..219db53 100644
--- common/src/java/org/apache/hive/http/JMXJsonServlet.java
+++ common/src/java/org/apache/hive/http/JMXJsonServlet.java
@@ -70,7 +70,7 @@
 *
 * The optional get parameter is used to query a specific
 * attribute of a JMX bean. The format of the URL is
- * http://.../jmx?get=MXBeanName::AttributeName
+ * http://.../jmx?get=MXBeanName::AttributeName
 *
 * For example
 *
@@ -85,7 +85,7 @@
 *
 * The return format is JSON and in the form
 *
- *
+ *
 *  {
 *    "beans" : [
 *      {
@@ -94,7 +94,7 @@
 *      }
 *    ]
 *  }
- *
+ *
 *
 * The servlet attempts to convert the JMXBeans into JSON. Each
 * bean's attributes will be converted to a JSON object member.
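Usage illustration (not part of the patch above): a client fetching a single attribute with the ?get=MXBeanName::AttributeName form described in the javadoc. The host and port are placeholders; the bean here is the standard JVM memory MXBean.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class JmxQueryExample {
      public static void main(String[] args) throws Exception {
        // Placeholder endpoint; the ?get= format follows the servlet javadoc above.
        URL url = new URL(
            "http://localhost:10002/jmx?get=java.lang:type=Memory::HeapMemoryUsage");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line); // JSON body with a single "beans" entry
          }
        } finally {
          conn.disconnect();
        }
      }
    }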