Author: zshao
Date: Wed Sep 2 22:45:42 2009
New Revision: 810724

URL: http://svn.apache.org/viewvc?rev=810724&view=rev
Log:
HIVE-811. Fix Javadocs. (Namit Jain via zshao)

Modified:
hadoop/hive/branches/branch-0.4/CHANGES.txt
hadoop/hive/branches/branch-0.4/build.xml
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectKey.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectValue.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
hadoop/hive/branches/branch-0.4/serde/build.xml
hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryPrimitive.java
hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryListObjectInspector.java
hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java
hadoop/hive/branches/branch-0.4/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
hadoop/hive/branches/branch-0.4/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hadoop/hive/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/CHANGES.txt?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/CHANGES.txt (original)
+++ hadoop/hive/branches/branch-0.4/CHANGES.txt Wed Sep 2 22:45:42 2009
@@ -237,6 +237,8 @@
HIVE-610. Move all properties from jpox.properties to hive-site.xml.
(Prasad Chakka via zshao)

+ HIVE-811. Fix Javadocs. (Namit Jain via zshao)
+
BUG FIXES

HIVE-381. Fix JDBC HiveResultSet's next function.

Modified: hadoop/hive/branches/branch-0.4/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/build.xml?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/build.xml (original)
+++ hadoop/hive/branches/branch-0.4/build.xml Wed Sep 2 22:45:42 2009
@@ -313,7 +313,6 @@
</target>

<target name="javadoc" depends="package" description="Generate javadoc">
-
<mkdir dir="${build.javadoc}"/>
<javadoc
packagenames="org.apache.hadoop.hive.*"
@@ -325,6 +324,7 @@
doctitle="${Name} ${version} API"
bottom="Copyright &amp;copy; ${year} The Apache Software Foundation"
>
+
<packageset dir="ant/src"/>
<packageset dir="hwi/src/java"/>
<packageset dir="hwi/src/test"/>
@@ -344,6 +344,7 @@
<packageset dir="cli/src/java"/>
<packageset dir="ql/src/java"/>
<packageset dir="${build.dir.hive}/ql/gen-java"/>
+ <packageset dir="shims/src/common/java"/>

<link href="${javadoc.link.java}"/>


Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java Wed Sep 2 22:45:42 2009
@@ -241,9 +241,7 @@

/**
* Get the next row. The fetch context is modified appropriately.
- *
- * @param ctx
- * fetch context
+ *
**/
public InspectableObject getNextRow() throws IOException {
try {
@@ -279,8 +277,6 @@
/**
* Clear the context, if anything needs to be done.
*
- * @param ctx
- * fetch context
**/
public void clearFetchContext() throws HiveException {
try {

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectKey.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectKey.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectKey.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectKey.java Wed Sep 2 22:45:42 2009
@@ -47,7 +47,6 @@

/**
* @param metadataTag
- * @param objectTypeTag
* @param obj
*/
public MapJoinObjectKey(int metadataTag, ArrayList<Object> obj) {

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectValue.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectValue.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectValue.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObjectValue.java Wed Sep 2 22:45:42 2009
@@ -47,7 +47,6 @@

/**
* @param metadataTag
- * @param objectTypeTag
* @param obj
*/
public MapJoinObjectValue(int metadataTag, ArrayList<ArrayList<Object>> obj) {

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java Wed Sep 2 22:45:42 2009
@@ -41,8 +41,8 @@

/**
* An util class for various Hive file format tasks.
- * {@link #registerOutputFormatSubstitute(Class, Class) and
- * {@link #getOutputFormatSubstitute(Class)} are added for backward
+ * registerOutputFormatSubstitute(Class, Class)
+ * getOutputFormatSubstitute(Class) are added for backward
* compatibility. They return the newly added HiveOutputFormat for the older
* ones.
*
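
[Editorial note, not part of the patch] The backward-compatibility mapping that Javadoc describes can be pictured with a small, purely illustrative sketch; the class and field names below are hypothetical and are not the actual HiveFileFormatUtils code:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: models the mapping the Javadoc above describes. Old
    // OutputFormat classes are registered against their HiveOutputFormat
    // replacements, and a lookup for an old class returns the substitute
    // (or the class itself if no substitute was registered).
    public final class OutputFormatSubstituteRegistry {
      private static final Map<Class<?>, Class<?>> SUBSTITUTES =
          new HashMap<Class<?>, Class<?>>();

      public static synchronized void registerOutputFormatSubstitute(
          Class<?> origin, Class<?> substitute) {
        SUBSTITUTES.put(origin, substitute);
      }

      public static synchronized Class<?> getOutputFormatSubstitute(Class<?> origin) {
        Class<?> substitute = SUBSTITUTES.get(origin);
        return substitute != null ? substitute : origin;
      }
    }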

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java Wed Sep 2 22:45:42 2009
@@ -77,9 +77,7 @@
*
* @throws IOException
* If a problem occurs reading from this DataInputStream.
- *
- * @see DataOutput#write(byte[])
- * @see DataOutput#write(byte[], int, int)
+ *
*/
@Override
public final int read(byte[] buffer) throws IOException {
@@ -103,8 +101,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#write(byte[])
- * @see DataOutput#write(byte[], int, int)
*/
@Override
public final int read(byte[] buffer, int offset, int length)
@@ -120,7 +116,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeBoolean(boolean)
*/
public final boolean readBoolean() throws IOException {
int temp = in.read();
@@ -138,7 +133,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeByte(int)
*/
public final byte readByte() throws IOException {
int temp = in.read();
@@ -156,7 +150,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeChar(int)
*/
private int readToBuff(int count) throws IOException {
int offset = 0;
@@ -186,7 +179,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeDouble(double)
*/
public final double readDouble() throws IOException {
return Double.longBitsToDouble(readLong());
@@ -200,7 +192,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeFloat(float)
*/
public final float readFloat() throws IOException {
return Float.intBitsToFloat(readInt());
@@ -217,8 +208,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#write(byte[])
- * @see DataOutput#write(byte[], int, int)
*/
public final void readFully(byte[] buffer) throws IOException {
readFully(buffer, 0, buffer.length);
@@ -241,7 +230,6 @@
* @throws EOFException
* if reaches the end of the stream before enough bytes have been
* read
- * @see java.io.DataInput#readFully(byte[], int, int)
*/
public final void readFully(byte[] buffer, int offset, int length)
throws IOException {
@@ -276,7 +264,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeInt(int)
*/
public final int readInt() throws IOException {
if (readToBuff(4) < 0) {
@@ -345,7 +332,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeLong(long)
*/
public final long readLong() throws IOException {
if (readToBuff(8) < 0) {
@@ -367,7 +353,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeShort(int)
*/
public final short readShort() throws IOException {
if (readToBuff(2) < 0) {
@@ -385,7 +370,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeByte(int)
*/
public final int readUnsignedByte() throws IOException {
int temp = in.read();
@@ -404,7 +388,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeShort(int)
*/
public final int readUnsignedShort() throws IOException {
if (readToBuff(2) < 0) {
@@ -421,7 +404,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeUTF(java.lang.String)
*/
public final String readUTF() throws IOException {
return decodeUTF(readUnsignedShort());
@@ -449,7 +431,6 @@
* @throws IOException
* If a problem occurs reading from this DataInputStream.
*
- * @see DataOutput#writeUTF(java.lang.String)
*/
public static final String readUTF(DataInput in) throws IOException {
return decodeUTF(in.readUnsignedShort(), in);

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRProcContext.java Wed Sep 2 22:45:42 2009
@@ -143,7 +143,7 @@
/**
* @param taskTmpDir
* @param tt_desc
- * @param childSelect
+ * @param rootMapJoinOp
* @param oldMapJoin
*/
public GenMRMapJoinCtx(String taskTmpDir, tableDesc tt_desc,

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/MapJoinProcessor.java Wed Sep 2 22:45:42 2009
@@ -493,7 +493,7 @@
MapJoinOperator currMapJoinOp;

/**
- * @param listMapJoins
+ * @param listMapJoinsNoRed
*/
public MapJoinWalkerCtx(List<MapJoinOperator> listMapJoinsNoRed) {
this.listMapJoinsNoRed = listMapJoinsNoRed;
@@ -509,7 +509,7 @@
}

/**
- * @param listMapJoins the listMapJoins to set
+ * @param listMapJoinsNoRed the listMapJoins to set
*/
public void setListMapJoins(List<MapJoinOperator> listMapJoinsNoRed) {
this.listMapJoinsNoRed = listMapJoinsNoRed;

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Wed Sep 2 22:45:42 2009
@@ -98,7 +98,7 @@
* @param prunerExpr the pruner expression for the alias
* @param conf for checking whether "strict" mode is on.
* @param alias for generating error message only.
- * @return
+ * @return the partition list for the table that satisfies the partition pruner condition.
* @throws HiveException
*/
public static PrunedPartitionList prune(Table tab, exprNodeDesc prunerExpr,

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Wed Sep 2 22:45:42 2009
@@ -86,21 +86,25 @@
* map from table scan operator to partition pruner
* @param aliasToSamplePruner
* sample pruner list
- * @param loadFileWork
- * list of destination files being loaded
- * @param loadTableWork
- * list of destination tables being loaded
- * @param opParseCtx
- * operator parse context - contains a mapping from operator to
- * operator parse state (row resolver etc.)
* @param topOps
* list of operators for the top query
* @param topSelOps
* list of operators for the selects introduced for column pruning
+ * @param opParseCtx
+ * operator parse context - contains a mapping from operator to
+ * operator parse state (row resolver etc.)
+ * @param joinContext context needed join processing (map join specifically)
+ * @param topToTable the top tables being processed
+ * @param loadTableWork
+ * list of destination tables being loaded
+ * @param loadFileWork
+ * list of destination files being loaded
+ * @param ctx parse context
+ * @param idToTableNameMap
+ * @param destTableId
+ * @param uCtx
* @param listMapJoinOpsNoReducer
* list of map join operators with no reducer
- * @param hasNonPartCols
- * the query has non partition columns
*/
public ParseContext(HiveConf conf, QB qb, ASTNode ast,
HashMap<String, ASTPartitionPruner> aliasToPruner,

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Wed Sep 2 22:45:42 2009
@@ -153,7 +153,7 @@
}

/**
- * @param mapSidejoin the mapSidejoin to set
+ * @param mapSideJoin the mapSidejoin to set
*/
public void setMapSideJoin(boolean mapSideJoin) {
this.mapSideJoin = mapSideJoin;

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolver.java Wed Sep 2 22:45:42 2009
@@ -27,7 +27,8 @@
public interface ConditionalResolver {
/**
* All conditional resolvers implement this interface
- * @param pctx opaque context
+ * @param conf configuration
+ * @param ctx opaque context
* @return position of the task
*/
public int getTaskId(HiveConf conf, Object ctx);
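
[Editorial note, not part of the patch] Assuming getTaskId is the interface's only method, as the hunk suggests, a trivial resolver might look like the hypothetical sketch below (the class name is made up for illustration):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.plan.ConditionalResolver;

    // Hypothetical example, not part of this commit: a resolver that ignores
    // its opaque context and always selects the first task in the list.
    public class FirstTaskResolver implements ConditionalResolver {
      public int getTaskId(HiveConf conf, Object ctx) {
        return 0; // position of the task to launch
      }
    }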

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Wed Sep 2 22:45:42 2009
@@ -135,7 +135,7 @@
}

/**
- * @param createTblDesc the createTblDesc to set
+ * @param createTblLikeDesc the createTblDesc to set
*/
public void setCreateTblLikeDesc(createTableLikeDesc createTblLikeDesc) {
this.createTblLikeDesc = createTblLikeDesc;
@@ -210,7 +210,7 @@
}

/**
- * @param showFuncsDesc the showFuncsDesc to set
+ * @param descFuncDesc the showFuncsDesc to set
*/
public void setDescFuncDesc(descFunctionDesc descFuncDesc) {
this.descFunctionDesc = descFuncDesc;

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java Wed Sep 2 22:45:42 2009
@@ -135,7 +135,7 @@
}

/**
- * @param mapredLocalWork the mapredLocalWork to set
+ * @param mapLocalWork the mapredLocalWork to set
*/
public void setMapLocalWork(final mapredLocalWork mapLocalWork) {
this.mapLocalWork = mapLocalWork;

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFPosMod.java Wed Sep 2 22:45:42 2009
@@ -32,8 +32,8 @@
/**
* class for computing positive modulo.
* Used for positive_mod command in Cli
- * @ See {@link org.apache.hadoop.hive.ql.udf.UDFOPMod}
- * @ See {@link org.apache.hadoop.hive.ql.exec.FunctionRegistry}
+ * See {org.apache.hadoop.hive.ql.udf.UDFOPMod}
+ * See {org.apache.hadoop.hive.ql.exec.FunctionRegistry}
*/
@description(
name = "pmod",

Modified: hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (original)
+++ hadoop/hive/branches/branch-0.4/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java Wed Sep 2 22:45:42 2009
@@ -117,7 +117,6 @@
/**
* This function will be called by GroupByOperator when it sees a new input row.
* @param agg The object to store the aggregation result.
- * @param parameterOIs The row, can be inspected by the OIs passed in init().
*/
public Object evaluate(AggregationBuffer agg) throws HiveException {
if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {

Modified: hadoop/hive/branches/branch-0.4/serde/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/build.xml?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/build.xml (original)
+++ hadoop/hive/branches/branch-0.4/serde/build.xml Wed Sep 2 22:45:42 2009
@@ -62,6 +62,7 @@
<arg line="--gen py --gen cpp --gen php --gen java -o ${src.dir} if/serde.thrift " />
</exec>
<echo>Executing thrift (which needs to be in your path) to build complex.thrift test classes... </echo>
+ <echo message="thrift --gen java -o ${src.dir} if/test/complex.thrift" />
<exec executable="thrift" failonerror="true" dir=".">
<arg line="--gen java -o ${src.dir} if/test/complex.thrift " />
</exec>

Modified: hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryPrimitive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryPrimitive.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryPrimitive.java (original)
+++ hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryPrimitive.java Wed Sep 2 22:45:42 2009
@@ -19,15 +19,15 @@

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Writable;
+
/**
- * Defines a LazyBianryPrimitive.
+ * Defines a LazyBinaryPrimitive.
*
- * {@data} will be initialized to corresponding types in
- * different LazyBinary primitive classes. For example, {@data} will
+ * data will be initialized to corresponding types in
+ * different LazyBinary primitive classes. For example, data will
* be a BooleanWritable in the LazyBinaryBoolean class.
*
* There is no null flag any more,
- * @see {@link LazyBinaryObject#init(ByteArrayRef, int, int)}
*
*/
public abstract class LazyBinaryPrimitive<OI extends ObjectInspector,

Modified: hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Wed Sep 2 22:45:42 2009
@@ -80,7 +80,6 @@

/**
* Initialize the SerDe with configuration and table information
- * @see SerDe#initialize(Configuration, Properties)
*/
@Override
public void initialize(Configuration conf, Properties tbl)
@@ -112,7 +111,6 @@

/**
* Returns the ObjectInspector for the row.
- * @see Deserializer#getObjectInspector()
*/
@Override
public ObjectInspector getObjectInspector() throws SerDeException {
@@ -121,7 +119,6 @@

/**
* Returns the Writable Class after serialization.
- * @see Serializer#getSerializedClass()
*/
@Override
public Class<? extends Writable> getSerializedClass() {
@@ -133,7 +130,6 @@

/**
* Deserialize a table record to a lazybinary struct.
- * @see Deserializer#deserialize(Writable)
*/
@Override
public Object deserialize(Writable field) throws SerDeException {
@@ -168,7 +164,6 @@

/**
* Serialize an object to a byte buffer in a binary compact way.
- * @see Serializer#serialize(Object, ObjectInspector)
*/
@Override
public Writable serialize(Object obj, ObjectInspector objInspector)
@@ -242,8 +237,6 @@
* @param byteStream the byte stream storing the serialization data
* @param obj the object to serialize
* @param objInspector the object inspector
- * @see LazyBinaryUtils#checkObjectByteInfo(ObjectInspector, byte[], int, LazyBinaryUtils.RecordInfo) for
- * how the byte sizes of different object are decoded.
*/
private void serialize(Output byteStream,
Object obj, ObjectInspector objInspector) {

Modified: hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java (original)
+++ hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinaryUtils.java Wed Sep 2 22:45:42 2009
@@ -118,7 +118,6 @@
* @param bytes bytes arrays store the table row
* @param offset offset of this field
* @param recordInfo modify this byteinfo object and return it
- * @return size and offset in bytes of this field
*/
public static void checkObjectByteInfo(ObjectInspector objectInspector, byte[] bytes, int offset, RecordInfo recordInfo) {
Category category = objectInspector.getCategory();
@@ -177,7 +176,6 @@

/**
* A zero-compressed encoded long
- * @see WritableUtils#readVLong(java.io.DataInput)
*/
public static class VLong {
public VLong() {
@@ -193,7 +191,6 @@
* @param bytes the byte array
* @param offset offset of the array to read from
* @param vlong storing the deserialized long and its size in byte
- * @see WritableUtils#readVLong(java.io.DataInput)
*/
public static void readVLong(byte[] bytes, int offset, VLong vlong) {
byte firstByte = bytes[offset];
@@ -213,7 +210,6 @@

/**
* A zero-compressed encoded integer
- * @see WritableUtils#readVInt(java.io.DataInput)
*/
public static class VInt {
public VInt() {
@@ -228,8 +224,7 @@
* Reads a zero-compressed encoded int from a byte array and returns it.
* @param bytes the byte array
* @param offset offset of the array to read from
- * @param vint storing the deserialized int and its size in byte
- * @see WritableUtils#readVInt(java.io.DataInput)
+ * @param vInt storing the deserialized int and its size in byte
*/
public static void readVInt(byte[] bytes, int offset, VInt vInt) {
byte firstByte = bytes[offset];
@@ -251,7 +246,6 @@
* Writes a zero-compressed encoded int to a byte array.
* @param byteStream the byte array/stream
* @param i the int
- * @see LazyBinaryUtils#writeVLong(Output, long)
*/
public static void writeVInt(Output byteStream, int i) {
writeVLong(byteStream, i);
@@ -261,7 +255,6 @@
* Write a zero-compressed encoded long to a byte array.
* @param byteStream the byte array/stream
* @param l the long
- * @see WritableUtils#writeVLong(java.io.DataOutput, long)
*/
public static void writeVLong(Output byteStream, long l) {
if (l >= -112 && l <= 127) {
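
[Editorial note, not part of the patch] For context on the zero-compressed encoding these methods reference: a long in [-112, 127] is written as a single byte, and anything else is written as a length marker followed by the value's significant bytes, with negative values one's-complemented first. The standalone method below is a sketch modeled on Hadoop's WritableUtils.writeVLong, shown for illustration only; it is not the patched Hive code, which writes to Hive's own Output stream type.

    import java.io.DataOutput;
    import java.io.IOException;

    // Sketch of the zero-compressed VLong encoding (assumed, for illustration).
    public final class VLongSketch {
      public static void writeVLong(DataOutput out, long l) throws IOException {
        if (l >= -112 && l <= 127) {
          out.writeByte((byte) l);          // small values fit in one byte
          return;
        }
        int len = -112;
        if (l < 0) {
          l ^= -1L;                         // one's complement of negative values
          len = -120;
        }
        long tmp = l;
        while (tmp != 0) {                  // count the significant bytes
          tmp = tmp >> 8;
          len--;
        }
        out.writeByte((byte) len);          // length marker
        len = (len < -120) ? -(len + 120) : -(len + 112);
        for (int idx = len; idx != 0; idx--) {
          int shift = (idx - 1) * 8;
          long mask = 0xFFL << shift;
          out.writeByte((byte) ((l & mask) >> shift));
        }
      }
    }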

Modified: hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryListObjectInspector.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryListObjectInspector.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryListObjectInspector.java (original)
+++ hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/objectinspector/LazyBinaryListObjectInspector.java Wed Sep 2 22:45:42 2009
@@ -25,7 +25,6 @@

/**
* ObjectInspector for LazyBinaryList
- * @see LazyBinaryList
*/
public class LazyBinaryListObjectInspector extends StandardListObjectInspector {


Modified: hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java (original)
+++ hadoop/hive/branches/branch-0.4/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ThriftStructObjectInspector.java Wed Sep 2 22:45:42 2009
@@ -27,7 +27,7 @@
ReflectionStructObjectInspector {

public boolean shouldIgnoreField(String name) {
- return "__isset".equals(name);
+ return name != null && name.startsWith("__isset");
}

public boolean equals(Object b) {

Modified: hadoop/hive/branches/branch-0.4/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hadoop/hive/branches/branch-0.4/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Wed Sep 2 22:45:42 2009
@@ -96,7 +96,7 @@
/**
* Executes a query.
*
- * @param query HiveQL query to execute
+ * @param cmd HiveQL query to execute
*/
public void execute(String cmd) throws HiveServerException, TException {
HiveServerHandler.LOG.info("Running the query: " + cmd);

Modified: hadoop/hive/branches/branch-0.4/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hadoop/hive/branches/branch-0.4/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=810724&r1=810723&r2=810724&view=diff
==============================================================================
--- hadoop/hive/branches/branch-0.4/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hadoop/hive/branches/branch-0.4/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Wed Sep 2 22:45:42 2009
@@ -45,7 +45,7 @@
/**
* Calls fs.deleteOnExit(path) if such a function exists.
*
- * @returns true if the call was successful
+ * @return true if the call was successful
*/
public boolean fileSystemDeleteOnExit(FileSystem fs, Path path) throws IOException;
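
[Editorial note, not part of the patch] A hedged sketch of how a concrete shim might satisfy that contract on a Hadoop release that does expose the call; the class name is hypothetical and the real per-version shim classes in this branch may differ:

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical shim fragment, for illustration only: where
    // FileSystem.deleteOnExit exists, the shim can simply delegate to it.
    public class DeleteOnExitShimExample {
      public boolean fileSystemDeleteOnExit(FileSystem fs, Path path) throws IOException {
        return fs.deleteOnExit(path);
      }
    }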
