FAQ
http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
new file mode 100644
index 0000000..0ad8c02
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/Type.java
@@ -0,0 +1,438 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.thrift;
+
+import java.sql.DatabaseMetaData;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hive.service.rpc.thrift.TTypeId;
+/**
+ * Type.
+ *
+ */
+public enum Type {
+ NULL_TYPE("VOID",
+ java.sql.Types.NULL,
+ TTypeId.NULL_TYPE),
+ BOOLEAN_TYPE("BOOLEAN",
+ java.sql.Types.BOOLEAN,
+ TTypeId.BOOLEAN_TYPE),
+ TINYINT_TYPE("TINYINT",
+ java.sql.Types.TINYINT,
+ TTypeId.TINYINT_TYPE),
+ SMALLINT_TYPE("SMALLINT",
+ java.sql.Types.SMALLINT,
+ TTypeId.SMALLINT_TYPE),
+ INT_TYPE("INT",
+ java.sql.Types.INTEGER,
+ TTypeId.INT_TYPE),
+ BIGINT_TYPE("BIGINT",
+ java.sql.Types.BIGINT,
+ TTypeId.BIGINT_TYPE),
+ FLOAT_TYPE("FLOAT",
+ java.sql.Types.FLOAT,
+ TTypeId.FLOAT_TYPE),
+ DOUBLE_TYPE("DOUBLE",
+ java.sql.Types.DOUBLE,
+ TTypeId.DOUBLE_TYPE),
+ STRING_TYPE("STRING",
+ java.sql.Types.VARCHAR,
+ TTypeId.STRING_TYPE),
+ CHAR_TYPE("CHAR",
+ java.sql.Types.CHAR,
+ TTypeId.CHAR_TYPE,
+ true, false, false),
+ VARCHAR_TYPE("VARCHAR",
+ java.sql.Types.VARCHAR,
+ TTypeId.VARCHAR_TYPE,
+ true, false, false),
+ DATE_TYPE("DATE",
+ java.sql.Types.DATE,
+ TTypeId.DATE_TYPE),
+ TIMESTAMP_TYPE("TIMESTAMP",
+ java.sql.Types.TIMESTAMP,
+ TTypeId.TIMESTAMP_TYPE),
+ INTERVAL_YEAR_MONTH_TYPE("INTERVAL_YEAR_MONTH",
+ java.sql.Types.OTHER,
+ TTypeId.INTERVAL_YEAR_MONTH_TYPE),
+ INTERVAL_DAY_TIME_TYPE("INTERVAL_DAY_TIME",
+ java.sql.Types.OTHER,
+ TTypeId.INTERVAL_DAY_TIME_TYPE),
+ BINARY_TYPE("BINARY",
+ java.sql.Types.BINARY,
+ TTypeId.BINARY_TYPE),
+ DECIMAL_TYPE("DECIMAL",
+ java.sql.Types.DECIMAL,
+ TTypeId.DECIMAL_TYPE,
+ true, false, false),
+ ARRAY_TYPE("ARRAY",
+ java.sql.Types.ARRAY,
+ TTypeId.ARRAY_TYPE,
+ true, true),
+ MAP_TYPE("MAP",
+ java.sql.Types.JAVA_OBJECT,
+ TTypeId.MAP_TYPE,
+ true, true),
+ STRUCT_TYPE("STRUCT",
+ java.sql.Types.STRUCT,
+ TTypeId.STRUCT_TYPE,
+ true, false),
+ UNION_TYPE("UNIONTYPE",
+ java.sql.Types.OTHER,
+ TTypeId.UNION_TYPE,
+ true, false),
+ USER_DEFINED_TYPE("USER_DEFINED",
+ java.sql.Types.OTHER,
+ TTypeId.USER_DEFINED_TYPE,
+ true, false);
+
+ private final String name;
+ private final TTypeId tType;
+ private final int javaSQLType;
+ private final boolean isQualified;
+ private final boolean isComplex;
+ private final boolean isCollection;
+
+ Type(String name, int javaSQLType, TTypeId tType, boolean isQualified, boolean isComplex,
+ boolean isCollection) {
+ this.name = name;
+ this.javaSQLType = javaSQLType;
+ this.tType = tType;
+ this.isQualified = isQualified;
+ this.isComplex = isComplex;
+ this.isCollection = isCollection;
+ }
+
+ Type(String name, int javaSQLType, TTypeId tType, boolean isComplex, boolean isCollection) {
+ this(name, javaSQLType, tType, false, isComplex, isCollection);
+ }
+
+ Type(String name, int javaSqlType, TTypeId tType) {
+ this(name, javaSqlType, tType, false, false, false);
+ }
+
+ public boolean isPrimitiveType() {
+ return !isComplex;
+ }
+
+ public boolean isQualifiedType() {
+ return isQualified;
+ }
+
+ public boolean isComplexType() {
+ return isComplex;
+ }
+
+ public boolean isCollectionType() {
+ return isCollection;
+ }
+
+ public static Type getType(TTypeId tType) {
+ for (Type type : values()) {
+ if (tType.equals(type.tType)) {
+ return type;
+ }
+ }
+ throw new IllegalArgumentException("Unrecognized Thrift TTypeId value: " + tType);
+ }
+
+ public static Type getType(String name) {
+ if (name == null) {
+ throw new IllegalArgumentException("Invalid type name: null");
+ }
+ for (Type type : values()) {
+ if (name.equalsIgnoreCase(type.name)) {
+ return type;
+ } else if (type.isQualifiedType() || type.isComplexType()) {
+ if (name.toUpperCase().startsWith(type.name)) {
+ return type;
+ }
+ }
+ }
+ throw new IllegalArgumentException("Unrecognized type name: " + name);
+ }
+
+ /**
+ * Convert TypeInfo to appropriate Type
+ * @param typeInfo
+ * @return
+ */
+ public static Type getType(TypeInfo typeInfo) {
+ switch (typeInfo.getCategory()) {
+ case PRIMITIVE: {
+ PrimitiveTypeInfo pTypeInfo = (PrimitiveTypeInfo) typeInfo;
+ switch (pTypeInfo.getPrimitiveCategory()) {
+ case VOID: {
+ return Type.NULL_TYPE;
+ }
+ case BOOLEAN: {
+ return Type.BOOLEAN_TYPE;
+ }
+ // Double check if this is the right mapping
+ case BYTE: {
+ return Type.BINARY_TYPE;
+ }
+ // Double check if this is the right mapping
+ case SHORT: {
+ return Type.SMALLINT_TYPE;
+ }
+ case INT: {
+ return Type.INT_TYPE;
+ }
+ // Double check if this is the right mapping
+ case LONG: {
+ return Type.BIGINT_TYPE;
+ }
+ case FLOAT: {
+ return Type.FLOAT_TYPE;
+ }
+ case DOUBLE: {
+ return Type.DOUBLE_TYPE;
+ }
+ case STRING: {
+ return Type.STRING_TYPE;
+ }
+ case CHAR: {
+ return Type.CHAR_TYPE;
+ }
+ case VARCHAR: {
+ return Type.VARCHAR_TYPE;
+ }
+ case BINARY: {
+ return Type.BINARY_TYPE;
+ }
+ case DATE: {
+ return Type.DATE_TYPE;
+ }
+ case TIMESTAMP: {
+ return Type.TIMESTAMP_TYPE;
+ }
+ case INTERVAL_YEAR_MONTH: {
+ return Type.INTERVAL_YEAR_MONTH_TYPE;
+ }
+ case INTERVAL_DAY_TIME: {
+ return Type.INTERVAL_DAY_TIME_TYPE;
+ }
+ case DECIMAL: {
+ return Type.DECIMAL_TYPE;
+ }
+ default: {
+ throw new RuntimeException("Unrecognized type: " + pTypeInfo.getPrimitiveCategory());
+ }
+ }
+ }
+ // Double check if this is the right mapping
+ case LIST: {
+ return Type.STRING_TYPE;
+ }
+ case MAP: {
+ return Type.MAP_TYPE;
+ }
+ case STRUCT: {
+ return Type.STRUCT_TYPE;
+ }
+ case UNION: {
+ return Type.UNION_TYPE;
+ }
+ default: {
+ throw new RuntimeException("Unrecognized type: " + typeInfo.getCategory());
+ }
+ }
+ }
+
+ /**
+ * Radix for this type (typically either 2 or 10)
+ * Null is returned for data types where this is not applicable.
+ */
+ public Integer getNumPrecRadix() {
+ if (this.isNumericType()) {
+ return 10;
+ }
+ return null;
+ }
+
+ /**
+ * Maximum precision for numeric types.
+ * Returns null for non-numeric types.
+ * @return
+ */
+ public Integer getMaxPrecision() {
+ switch (this) {
+ case TINYINT_TYPE:
+ return 3;
+ case SMALLINT_TYPE:
+ return 5;
+ case INT_TYPE:
+ return 10;
+ case BIGINT_TYPE:
+ return 19;
+ case FLOAT_TYPE:
+ return 7;
+ case DOUBLE_TYPE:
+ return 15;
+ case DECIMAL_TYPE:
+ return HiveDecimal.MAX_PRECISION;
+ default:
+ return null;
+ }
+ }
+
+ public boolean isNumericType() {
+ switch (this) {
+ case TINYINT_TYPE:
+ case SMALLINT_TYPE:
+ case INT_TYPE:
+ case BIGINT_TYPE:
+ case FLOAT_TYPE:
+ case DOUBLE_TYPE:
+ case DECIMAL_TYPE:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Prefix used to quote a literal of this type (may be null)
+ */
+ public String getLiteralPrefix() {
+ return null;
+ }
+
+ /**
+ * Suffix used to quote a literal of this type (may be null)
+ * @return
+ */
+ public String getLiteralSuffix() {
+ return null;
+ }
+
+ /**
+ * Can you use NULL for this type?
+ * @return
+ * DatabaseMetaData.typeNoNulls - does not allow NULL values
+ * DatabaseMetaData.typeNullable - allows NULL values
+ * DatabaseMetaData.typeNullableUnknown - nullability unknown
+ */
+ public Short getNullable() {
+ // All Hive types are nullable
+ return DatabaseMetaData.typeNullable;
+ }
+
+ /**
+ * Is the type case sensitive?
+ * @return
+ */
+ public Boolean isCaseSensitive() {
+ switch (this) {
+ case STRING_TYPE:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Parameters used in creating the type (may be null)
+ * @return
+ */
+ public String getCreateParams() {
+ return null;
+ }
+
+ /**
+ * Can you use WHERE based on this type?
+ * @return
+ * DatabaseMetaData.typePredNone - No support
+ * DatabaseMetaData.typePredChar - Only support with WHERE .. LIKE
+ * DatabaseMetaData.typePredBasic - Supported except for WHERE .. LIKE
+ * DatabaseMetaData.typeSearchable - Supported for all WHERE ..
+ */
+ public Short getSearchable() {
+ if (isPrimitiveType()) {
+ return DatabaseMetaData.typeSearchable;
+ }
+ return DatabaseMetaData.typePredNone;
+ }
+
+ /**
+ * Is this type unsigned?
+ * @return
+ */
+ public Boolean isUnsignedAttribute() {
+ if (isNumericType()) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Can this type represent money?
+ * @return
+ */
+ public Boolean isFixedPrecScale() {
+ return false;
+ }
+
+ /**
+ * Can this type be used for an auto-increment value?
+ * @return
+ */
+ public Boolean isAutoIncrement() {
+ return false;
+ }
+
+ /**
+ * Localized version of type name (may be null).
+ * @return
+ */
+ public String getLocalizedName() {
+ return null;
+ }
+
+ /**
+ * Minimum scale supported for this type
+ * @return
+ */
+ public Short getMinimumScale() {
+ return 0;
+ }
+
+ /**
+ * Maximum scale supported for this type
+ * @return
+ */
+ public Short getMaximumScale() {
+ return 0;
+ }
+
+ public TTypeId toTType() {
+ return tType;
+ }
+
+ public int toJavaSQLType() {
+ return javaSQLType;
+ }
+
+ public String getName() {
+ return name;
+ }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
index 70dc181..e015e06 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/TypeInfo.java
@@ -26,11 +26,15 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
   * Stores information about a type. Always use the TypeInfoFactory to create new
   * TypeInfo objects.
   *
- * We support 8 categories of types: 1. Primitive objects (String, Number, etc)
- * 2. List objects (a list of objects of a single type) 3. Map objects (a map
- * from objects of one type to objects of another type) 4. Struct objects (a
- * list of fields with names and their own types) 5. Union objects
- * 6. Decimal objects 7. Char objects 8. Varchar objects
+ * We support 8 categories of types:
+ * 1. Primitive objects (String, Number, etc)
+ * 2. List objects (a list of objects of a single type)
+ * 3. Map objects (a map from objects of one type to objects of another type)
+ * 4. Struct objects (a list of fields with names and their own types)
+ * 5. Union objects
+ * 6. Decimal objects
+ * 7. Char objects
+ * 8. Varchar objects
   */
  public abstract class TypeInfo implements Serializable {


http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/if/TCLIService.thrift
----------------------------------------------------------------------
diff --git a/service-rpc/if/TCLIService.thrift b/service-rpc/if/TCLIService.thrift
index aa28b6e..674530d 100644
--- a/service-rpc/if/TCLIService.thrift
+++ b/service-rpc/if/TCLIService.thrift
@@ -402,6 +402,8 @@ struct TRowSet {
    1: required i64 startRowOffset
    2: required list<TRow> rows
    3: optional list<TColumn> columns
+ 4: optional binary binaryColumns
+ 5: optional i32 columnCount
  }

  // The return status code contained in each response.

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
index 3a27a60..395af2c 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.cpp
@@ -3858,6 +3858,16 @@ void TRowSet::__set_columns(const std::vector<TColumn> & val) {
  __isset.columns = true;
  }

+void TRowSet::__set_binaryColumns(const std::string& val) {
+ this->binaryColumns = val;
+__isset.binaryColumns = true;
+}
+
+void TRowSet::__set_columnCount(const int32_t val) {
+ this->columnCount = val;
+__isset.columnCount = true;
+}
+
  uint32_t TRowSet::read(::apache::thrift::protocol::TProtocol* iprot) {

    apache::thrift::protocol::TInputRecursionTracker tracker(*iprot);
@@ -3929,6 +3939,22 @@ uint32_t TRowSet::read(::apache::thrift::protocol::TProtocol* iprot) {
            xfer += iprot->skip(ftype);
          }
          break;
+ case 4:
+ if (ftype == ::apache::thrift::protocol::T_STRING) {
+ xfer += iprot->readBinary(this->binaryColumns);
+ this->__isset.binaryColumns = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 5:
+ if (ftype == ::apache::thrift::protocol::T_I32) {
+ xfer += iprot->readI32(this->columnCount);
+ this->__isset.columnCount = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
        default:
          xfer += iprot->skip(ftype);
          break;
@@ -3979,6 +4005,16 @@ uint32_t TRowSet::write(::apache::thrift::protocol::TProtocol* oprot) const {
      }
      xfer += oprot->writeFieldEnd();
    }
+ if (this->__isset.binaryColumns) {
+ xfer += oprot->writeFieldBegin("binaryColumns", ::apache::thrift::protocol::T_STRING, 4);
+ xfer += oprot->writeBinary(this->binaryColumns);
+ xfer += oprot->writeFieldEnd();
+ }
+ if (this->__isset.columnCount) {
+ xfer += oprot->writeFieldBegin("columnCount", ::apache::thrift::protocol::T_I32, 5);
+ xfer += oprot->writeI32(this->columnCount);
+ xfer += oprot->writeFieldEnd();
+ }
    xfer += oprot->writeFieldStop();
    xfer += oprot->writeStructEnd();
    return xfer;
@@ -3989,6 +4025,8 @@ void swap(TRowSet &a, TRowSet &b) {
    swap(a.startRowOffset, b.startRowOffset);
    swap(a.rows, b.rows);
    swap(a.columns, b.columns);
+ swap(a.binaryColumns, b.binaryColumns);
+ swap(a.columnCount, b.columnCount);
    swap(a.__isset, b.__isset);
  }

@@ -3996,12 +4034,16 @@ TRowSet::TRowSet(const TRowSet& other163) {
    startRowOffset = other163.startRowOffset;
    rows = other163.rows;
    columns = other163.columns;
+ binaryColumns = other163.binaryColumns;
+ columnCount = other163.columnCount;
    __isset = other163.__isset;
  }
  TRowSet& TRowSet::operator=(const TRowSet& other164) {
    startRowOffset = other164.startRowOffset;
    rows = other164.rows;
    columns = other164.columns;
+ binaryColumns = other164.binaryColumns;
+ columnCount = other164.columnCount;
    __isset = other164.__isset;
    return *this;
  }
@@ -4011,6 +4053,8 @@ void TRowSet::printTo(std::ostream& out) const {
    out << "startRowOffset=" << to_string(startRowOffset);
    out << ", " << "rows=" << to_string(rows);
    out << ", " << "columns="; (__isset.columns ? (out << to_string(columns)) : (out << "<null>"));
+ out << ", " << "binaryColumns="; (__isset.binaryColumns ? (out << to_string(binaryColumns)) : (out << "<null>"));
+ out << ", " << "columnCount="; (__isset.columnCount ? (out << to_string(columnCount)) : (out << "<null>"));
    out << ")";
  }


http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
index 7f1d9dd..d4b401c 100644
--- a/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
+++ b/service-rpc/src/gen/thrift/gen-cpp/TCLIService_types.h
@@ -1809,8 +1809,10 @@ inline std::ostream& operator<<(std::ostream& out, const TColumn& obj)
  }

  typedef struct _TRowSet__isset {
- _TRowSet__isset() : columns(false) {}
+ _TRowSet__isset() : columns(false), binaryColumns(false), columnCount(false) {}
    bool columns :1;
+ bool binaryColumns :1;
+ bool columnCount :1;
  } _TRowSet__isset;

  class TRowSet {
@@ -1818,13 +1820,15 @@ class TRowSet {

    TRowSet(const TRowSet&);
    TRowSet& operator=(const TRowSet&);
- TRowSet() : startRowOffset(0) {
+ TRowSet() : startRowOffset(0), binaryColumns(), columnCount(0) {
    }

    virtual ~TRowSet() throw();
    int64_t startRowOffset;
    std::vector<TRow> rows;
    std::vector<TColumn> columns;
+ std::string binaryColumns;
+ int32_t columnCount;

    _TRowSet__isset __isset;

@@ -1834,6 +1838,10 @@ class TRowSet {

    void __set_columns(const std::vector<TColumn> & val);

+ void __set_binaryColumns(const std::string& val);
+
+ void __set_columnCount(const int32_t val);
+
    bool operator == (const TRowSet & rhs) const
    {
      if (!(startRowOffset == rhs.startRowOffset))
@@ -1844,6 +1852,14 @@ class TRowSet {
        return false;
      else if (__isset.columns && !(columns == rhs.columns))
        return false;
+ if (__isset.binaryColumns != rhs.__isset.binaryColumns)
+ return false;
+ else if (__isset.binaryColumns && !(binaryColumns == rhs.binaryColumns))
+ return false;
+ if (__isset.columnCount != rhs.__isset.columnCount)
+ return false;
+ else if (__isset.columnCount && !(columnCount == rhs.columnCount))
+ return false;
      return true;
    }
    bool operator != (const TRowSet &rhs) const {

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TRowSet.java
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TRowSet.java b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TRowSet.java
index 2f6e31c..da3d9d3 100644
--- a/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TRowSet.java
+++ b/service-rpc/src/gen/thrift/gen-javabean/org/apache/hive/service/rpc/thrift/TRowSet.java
@@ -41,6 +41,8 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
    private static final org.apache.thrift.protocol.TField START_ROW_OFFSET_FIELD_DESC = new org.apache.thrift.protocol.TField("startRowOffset", org.apache.thrift.protocol.TType.I64, (short)1);
    private static final org.apache.thrift.protocol.TField ROWS_FIELD_DESC = new org.apache.thrift.protocol.TField("rows", org.apache.thrift.protocol.TType.LIST, (short)2);
    private static final org.apache.thrift.protocol.TField COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("columns", org.apache.thrift.protocol.TType.LIST, (short)3);
+ private static final org.apache.thrift.protocol.TField BINARY_COLUMNS_FIELD_DESC = new org.apache.thrift.protocol.TField("binaryColumns", org.apache.thrift.protocol.TType.STRING, (short)4);
+ private static final org.apache.thrift.protocol.TField COLUMN_COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("columnCount", org.apache.thrift.protocol.TType.I32, (short)5);

    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
@@ -51,12 +53,16 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
    private long startRowOffset; // required
    private List<TRow> rows; // required
    private List<TColumn> columns; // optional
+ private ByteBuffer binaryColumns; // optional
+ private int columnCount; // optional

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      START_ROW_OFFSET((short)1, "startRowOffset"),
      ROWS((short)2, "rows"),
- COLUMNS((short)3, "columns");
+ COLUMNS((short)3, "columns"),
+ BINARY_COLUMNS((short)4, "binaryColumns"),
+ COLUMN_COUNT((short)5, "columnCount");

      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

@@ -77,6 +83,10 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
            return ROWS;
          case 3: // COLUMNS
            return COLUMNS;
+ case 4: // BINARY_COLUMNS
+ return BINARY_COLUMNS;
+ case 5: // COLUMN_COUNT
+ return COLUMN_COUNT;
          default:
            return null;
        }
@@ -118,8 +128,9 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields

    // isset id assignments
    private static final int __STARTROWOFFSET_ISSET_ID = 0;
+ private static final int __COLUMNCOUNT_ISSET_ID = 1;
    private byte __isset_bitfield = 0;
- private static final _Fields optionals[] = {_Fields.COLUMNS};
+ private static final _Fields optionals[] = {_Fields.COLUMNS,_Fields.BINARY_COLUMNS,_Fields.COLUMN_COUNT};
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
@@ -131,6 +142,10 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
      tmpMap.put(_Fields.COLUMNS, new org.apache.thrift.meta_data.FieldMetaData("columns", org.apache.thrift.TFieldRequirementType.OPTIONAL,
          new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
              new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TColumn.class))));
+ tmpMap.put(_Fields.BINARY_COLUMNS, new org.apache.thrift.meta_data.FieldMetaData("binaryColumns", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.COLUMN_COUNT, new org.apache.thrift.meta_data.FieldMetaData("columnCount", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TRowSet.class, metaDataMap);
    }
@@ -168,6 +183,10 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
        }
        this.columns = __this__columns;
      }
+ if (other.isSetBinaryColumns()) {
+ this.binaryColumns = org.apache.thrift.TBaseHelper.copyBinary(other.binaryColumns);
+ }
+ this.columnCount = other.columnCount;
    }

    public TRowSet deepCopy() {
@@ -180,6 +199,9 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
      this.startRowOffset = 0;
      this.rows = null;
      this.columns = null;
+ this.binaryColumns = null;
+ setColumnCountIsSet(false);
+ this.columnCount = 0;
    }

    public long getStartRowOffset() {
@@ -280,6 +302,60 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
      }
    }

+ public byte[] getBinaryColumns() {
+ setBinaryColumns(org.apache.thrift.TBaseHelper.rightSize(binaryColumns));
+ return binaryColumns == null ? null : binaryColumns.array();
+ }
+
+ public ByteBuffer bufferForBinaryColumns() {
+ return org.apache.thrift.TBaseHelper.copyBinary(binaryColumns);
+ }
+
+ public void setBinaryColumns(byte[] binaryColumns) {
+ this.binaryColumns = binaryColumns == null ? (ByteBuffer)null : ByteBuffer.wrap(Arrays.copyOf(binaryColumns, binaryColumns.length));
+ }
+
+ public void setBinaryColumns(ByteBuffer binaryColumns) {
+ this.binaryColumns = org.apache.thrift.TBaseHelper.copyBinary(binaryColumns);
+ }
+
+ public void unsetBinaryColumns() {
+ this.binaryColumns = null;
+ }
+
+ /** Returns true if field binaryColumns is set (has been assigned a value) and false otherwise */
+ public boolean isSetBinaryColumns() {
+ return this.binaryColumns != null;
+ }
+
+ public void setBinaryColumnsIsSet(boolean value) {
+ if (!value) {
+ this.binaryColumns = null;
+ }
+ }
+
+ public int getColumnCount() {
+ return this.columnCount;
+ }
+
+ public void setColumnCount(int columnCount) {
+ this.columnCount = columnCount;
+ setColumnCountIsSet(true);
+ }
+
+ public void unsetColumnCount() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __COLUMNCOUNT_ISSET_ID);
+ }
+
+ /** Returns true if field columnCount is set (has been assigned a value) and false otherwise */
+ public boolean isSetColumnCount() {
+ return EncodingUtils.testBit(__isset_bitfield, __COLUMNCOUNT_ISSET_ID);
+ }
+
+ public void setColumnCountIsSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __COLUMNCOUNT_ISSET_ID, value);
+ }
+
    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      case START_ROW_OFFSET:
@@ -306,6 +382,22 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
        }
        break;

+ case BINARY_COLUMNS:
+ if (value == null) {
+ unsetBinaryColumns();
+ } else {
+ setBinaryColumns((ByteBuffer)value);
+ }
+ break;
+
+ case COLUMN_COUNT:
+ if (value == null) {
+ unsetColumnCount();
+ } else {
+ setColumnCount((Integer)value);
+ }
+ break;
+
      }
    }

@@ -320,6 +412,12 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
      case COLUMNS:
        return getColumns();

+ case BINARY_COLUMNS:
+ return getBinaryColumns();
+
+ case COLUMN_COUNT:
+ return getColumnCount();
+
      }
      throw new IllegalStateException();
    }
@@ -337,6 +435,10 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
        return isSetRows();
      case COLUMNS:
        return isSetColumns();
+ case BINARY_COLUMNS:
+ return isSetBinaryColumns();
+ case COLUMN_COUNT:
+ return isSetColumnCount();
      }
      throw new IllegalStateException();
    }
@@ -381,6 +483,24 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
          return false;
      }

+ boolean this_present_binaryColumns = true && this.isSetBinaryColumns();
+ boolean that_present_binaryColumns = true && that.isSetBinaryColumns();
+ if (this_present_binaryColumns || that_present_binaryColumns) {
+ if (!(this_present_binaryColumns && that_present_binaryColumns))
+ return false;
+ if (!this.binaryColumns.equals(that.binaryColumns))
+ return false;
+ }
+
+ boolean this_present_columnCount = true && this.isSetColumnCount();
+ boolean that_present_columnCount = true && that.isSetColumnCount();
+ if (this_present_columnCount || that_present_columnCount) {
+ if (!(this_present_columnCount && that_present_columnCount))
+ return false;
+ if (this.columnCount != that.columnCount)
+ return false;
+ }
+
      return true;
    }

@@ -403,6 +523,16 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
      if (present_columns)
        list.add(columns);

+ boolean present_binaryColumns = true && (isSetBinaryColumns());
+ list.add(present_binaryColumns);
+ if (present_binaryColumns)
+ list.add(binaryColumns);
+
+ boolean present_columnCount = true && (isSetColumnCount());
+ list.add(present_columnCount);
+ if (present_columnCount)
+ list.add(columnCount);
+
      return list.hashCode();
    }

@@ -444,6 +574,26 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
          return lastComparison;
        }
      }
+ lastComparison = Boolean.valueOf(isSetBinaryColumns()).compareTo(other.isSetBinaryColumns());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetBinaryColumns()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.binaryColumns, other.binaryColumns);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetColumnCount()).compareTo(other.isSetColumnCount());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetColumnCount()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.columnCount, other.columnCount);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
      return 0;
    }

@@ -485,6 +635,22 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
        }
        first = false;
      }
+ if (isSetBinaryColumns()) {
+ if (!first) sb.append(", ");
+ sb.append("binaryColumns:");
+ if (this.binaryColumns == null) {
+ sb.append("null");
+ } else {
+ org.apache.thrift.TBaseHelper.toString(this.binaryColumns, sb);
+ }
+ first = false;
+ }
+ if (isSetColumnCount()) {
+ if (!first) sb.append(", ");
+ sb.append("columnCount:");
+ sb.append(this.columnCount);
+ first = false;
+ }
      sb.append(")");
      return sb.toString();
    }
@@ -584,6 +750,22 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
              }
              break;
+ case 4: // BINARY_COLUMNS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.binaryColumns = iprot.readBinary();
+ struct.setBinaryColumnsIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 5: // COLUMN_COUNT
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.columnCount = iprot.readI32();
+ struct.setColumnCountIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
            default:
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
@@ -626,6 +808,18 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
            oprot.writeFieldEnd();
          }
        }
+ if (struct.binaryColumns != null) {
+ if (struct.isSetBinaryColumns()) {
+ oprot.writeFieldBegin(BINARY_COLUMNS_FIELD_DESC);
+ oprot.writeBinary(struct.binaryColumns);
+ oprot.writeFieldEnd();
+ }
+ }
+ if (struct.isSetColumnCount()) {
+ oprot.writeFieldBegin(COLUMN_COUNT_FIELD_DESC);
+ oprot.writeI32(struct.columnCount);
+ oprot.writeFieldEnd();
+ }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
@@ -655,7 +849,13 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
        if (struct.isSetColumns()) {
          optionals.set(0);
        }
- oprot.writeBitSet(optionals, 1);
+ if (struct.isSetBinaryColumns()) {
+ optionals.set(1);
+ }
+ if (struct.isSetColumnCount()) {
+ optionals.set(2);
+ }
+ oprot.writeBitSet(optionals, 3);
        if (struct.isSetColumns()) {
          {
            oprot.writeI32(struct.columns.size());
@@ -665,6 +865,12 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
            }
          }
        }
+ if (struct.isSetBinaryColumns()) {
+ oprot.writeBinary(struct.binaryColumns);
+ }
+ if (struct.isSetColumnCount()) {
+ oprot.writeI32(struct.columnCount);
+ }
      }

      @Override
@@ -684,7 +890,7 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
          }
        }
        struct.setRowsIsSet(true);
- BitSet incoming = iprot.readBitSet(1);
+ BitSet incoming = iprot.readBitSet(3);
        if (incoming.get(0)) {
          {
            org.apache.thrift.protocol.TList _list131 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
@@ -699,6 +905,14 @@ public class TRowSet implements org.apache.thrift.TBase<TRowSet, TRowSet._Fields
          }
          struct.setColumnsIsSet(true);
        }
+ if (incoming.get(1)) {
+ struct.binaryColumns = iprot.readBinary();
+ struct.setBinaryColumnsIsSet(true);
+ }
+ if (incoming.get(2)) {
+ struct.columnCount = iprot.readI32();
+ struct.setColumnCountIsSet(true);
+ }
      }
    }


http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-php/Types.php
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-php/Types.php b/service-rpc/src/gen/thrift/gen-php/Types.php
index b7df50a..fc12770 100644
--- a/service-rpc/src/gen/thrift/gen-php/Types.php
+++ b/service-rpc/src/gen/thrift/gen-php/Types.php
@@ -3772,6 +3772,14 @@ class TRowSet {
     * @var \TColumn[]
     */
    public $columns = null;
+ /**
+ * @var string
+ */
+ public $binaryColumns = null;
+ /**
+ * @var int
+ */
+ public $columnCount = null;

    public function __construct($vals=null) {
      if (!isset(self::$_TSPEC)) {
@@ -3798,6 +3806,14 @@ class TRowSet {
              'class' => '\TColumn',
              ),
            ),
+ 4 => array(
+ 'var' => 'binaryColumns',
+ 'type' => TType::STRING,
+ ),
+ 5 => array(
+ 'var' => 'columnCount',
+ 'type' => TType::I32,
+ ),
          );
      }
      if (is_array($vals)) {
@@ -3810,6 +3826,12 @@ class TRowSet {
        if (isset($vals['columns'])) {
          $this->columns = $vals['columns'];
        }
+ if (isset($vals['binaryColumns'])) {
+ $this->binaryColumns = $vals['binaryColumns'];
+ }
+ if (isset($vals['columnCount'])) {
+ $this->columnCount = $vals['columnCount'];
+ }
      }
    }

@@ -3875,6 +3897,20 @@ class TRowSet {
              $xfer += $input->skip($ftype);
            }
            break;
+ case 4:
+ if ($ftype == TType::STRING) {
+ $xfer += $input->readString($this->binaryColumns);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
+ case 5:
+ if ($ftype == TType::I32) {
+ $xfer += $input->readI32($this->columnCount);
+ } else {
+ $xfer += $input->skip($ftype);
+ }
+ break;
          default:
            $xfer += $input->skip($ftype);
            break;
@@ -3927,6 +3963,16 @@ class TRowSet {
        }
        $xfer += $output->writeFieldEnd();
      }
+ if ($this->binaryColumns !== null) {
+ $xfer += $output->writeFieldBegin('binaryColumns', TType::STRING, 4);
+ $xfer += $output->writeString($this->binaryColumns);
+ $xfer += $output->writeFieldEnd();
+ }
+ if ($this->columnCount !== null) {
+ $xfer += $output->writeFieldBegin('columnCount', TType::I32, 5);
+ $xfer += $output->writeI32($this->columnCount);
+ $xfer += $output->writeFieldEnd();
+ }
      $xfer += $output->writeFieldStop();
      $xfer += $output->writeStructEnd();
      return $xfer;

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
index c691781..231d001 100644
--- a/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
+++ b/service-rpc/src/gen/thrift/gen-py/TCLIService/ttypes.py
@@ -2965,6 +2965,8 @@ class TRowSet:
     - startRowOffset
     - rows
     - columns
+ - binaryColumns
+ - columnCount
    """

    thrift_spec = (
@@ -2972,12 +2974,16 @@ class TRowSet:
      (1, TType.I64, 'startRowOffset', None, None, ), # 1
      (2, TType.LIST, 'rows', (TType.STRUCT,(TRow, TRow.thrift_spec)), None, ), # 2
      (3, TType.LIST, 'columns', (TType.STRUCT,(TColumn, TColumn.thrift_spec)), None, ), # 3
+ (4, TType.STRING, 'binaryColumns', None, None, ), # 4
+ (5, TType.I32, 'columnCount', None, None, ), # 5
    )

- def __init__(self, startRowOffset=None, rows=None, columns=None,):
+ def __init__(self, startRowOffset=None, rows=None, columns=None, binaryColumns=None, columnCount=None,):
      self.startRowOffset = startRowOffset
      self.rows = rows
      self.columns = columns
+ self.binaryColumns = binaryColumns
+ self.columnCount = columnCount

    def read(self, iprot):
      if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -3015,6 +3021,16 @@ class TRowSet:
            iprot.readListEnd()
          else:
            iprot.skip(ftype)
+ elif fid == 4:
+ if ftype == TType.STRING:
+ self.binaryColumns = iprot.readString()
+ else:
+ iprot.skip(ftype)
+ elif fid == 5:
+ if ftype == TType.I32:
+ self.columnCount = iprot.readI32()
+ else:
+ iprot.skip(ftype)
        else:
          iprot.skip(ftype)
        iprot.readFieldEnd()
@@ -3043,6 +3059,14 @@ class TRowSet:
          iter117.write(oprot)
        oprot.writeListEnd()
        oprot.writeFieldEnd()
+ if self.binaryColumns is not None:
+ oprot.writeFieldBegin('binaryColumns', TType.STRING, 4)
+ oprot.writeString(self.binaryColumns)
+ oprot.writeFieldEnd()
+ if self.columnCount is not None:
+ oprot.writeFieldBegin('columnCount', TType.I32, 5)
+ oprot.writeI32(self.columnCount)
+ oprot.writeFieldEnd()
      oprot.writeFieldStop()
      oprot.writeStructEnd()

@@ -3059,6 +3083,8 @@ class TRowSet:
      value = (value * 31) ^ hash(self.startRowOffset)
      value = (value * 31) ^ hash(self.rows)
      value = (value * 31) ^ hash(self.columns)
+ value = (value * 31) ^ hash(self.binaryColumns)
+ value = (value * 31) ^ hash(self.columnCount)
      return value

    def __repr__(self):

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
----------------------------------------------------------------------
diff --git a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
index 07ed97c..28cae72 100644
--- a/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
+++ b/service-rpc/src/gen/thrift/gen-rb/t_c_l_i_service_types.rb
@@ -832,11 +832,15 @@ class TRowSet
    STARTROWOFFSET = 1
    ROWS = 2
    COLUMNS = 3
+ BINARYCOLUMNS = 4
+ COLUMNCOUNT = 5

    FIELDS = {
      STARTROWOFFSET => {:type => ::Thrift::Types::I64, :name => 'startRowOffset'},
      ROWS => {:type => ::Thrift::Types::LIST, :name => 'rows', :element => {:type => ::Thrift::Types::STRUCT, :class => ::TRow}},
- COLUMNS => {:type => ::Thrift::Types::LIST, :name => 'columns', :element => {:type => ::Thrift::Types::STRUCT, :class => ::TColumn}, :optional => true}
+ COLUMNS => {:type => ::Thrift::Types::LIST, :name => 'columns', :element => {:type => ::Thrift::Types::STRUCT, :class => ::TColumn}, :optional => true},
+ BINARYCOLUMNS => {:type => ::Thrift::Types::STRING, :name => 'binaryColumns', :binary => true, :optional => true},
+ COLUMNCOUNT => {:type => ::Thrift::Types::I32, :name => 'columnCount', :optional => true}
    }

    def struct_fields; FIELDS; end

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/Column.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/Column.java b/service/src/java/org/apache/hive/service/cli/Column.java
deleted file mode 100644
index 102d920..0000000
--- a/service/src/java/org/apache/hive/service/cli/Column.java
+++ /dev/null
@@ -1,434 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.nio.ByteBuffer;
-import java.util.AbstractList;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.List;
-
-import org.apache.hive.service.rpc.thrift.TBinaryColumn;
-import org.apache.hive.service.rpc.thrift.TBoolColumn;
-import org.apache.hive.service.rpc.thrift.TByteColumn;
-import org.apache.hive.service.rpc.thrift.TColumn;
-import org.apache.hive.service.rpc.thrift.TDoubleColumn;
-import org.apache.hive.service.rpc.thrift.TI16Column;
-import org.apache.hive.service.rpc.thrift.TI32Column;
-import org.apache.hive.service.rpc.thrift.TI64Column;
-import org.apache.hive.service.rpc.thrift.TStringColumn;
-
-import com.google.common.primitives.Booleans;
-import com.google.common.primitives.Bytes;
-import com.google.common.primitives.Doubles;
-import com.google.common.primitives.Ints;
-import com.google.common.primitives.Longs;
-import com.google.common.primitives.Shorts;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-
-/**
- * Column.
- */
-public class Column extends AbstractList {
-
- private static final int DEFAULT_SIZE = 100;
-
- private final Type type;
-
- private BitSet nulls;
-
- private int size;
- private boolean[] boolVars;
- private byte[] byteVars;
- private short[] shortVars;
- private int[] intVars;
- private long[] longVars;
- private double[] doubleVars;
- private List<String> stringVars;
- private List<ByteBuffer> binaryVars;
-
- public Column(Type type, BitSet nulls, Object values) {
- this.type = type;
- this.nulls = nulls;
- if (type == Type.BOOLEAN_TYPE) {
- boolVars = (boolean[]) values;
- size = boolVars.length;
- } else if (type == Type.TINYINT_TYPE) {
- byteVars = (byte[]) values;
- size = byteVars.length;
- } else if (type == Type.SMALLINT_TYPE) {
- shortVars = (short[]) values;
- size = shortVars.length;
- } else if (type == Type.INT_TYPE) {
- intVars = (int[]) values;
- size = intVars.length;
- } else if (type == Type.BIGINT_TYPE) {
- longVars = (long[]) values;
- size = longVars.length;
- } else if (type == Type.DOUBLE_TYPE) {
- doubleVars = (double[]) values;
- size = doubleVars.length;
- } else if (type == Type.BINARY_TYPE) {
- binaryVars = (List<ByteBuffer>) values;
- size = binaryVars.size();
- } else if (type == Type.STRING_TYPE) {
- stringVars = (List<String>) values;
- size = stringVars.size();
- } else {
- throw new IllegalStateException("invalid union object");
- }
- }
-
- public Column(Type type) {
- nulls = new BitSet();
- switch (type) {
- case BOOLEAN_TYPE:
- boolVars = new boolean[DEFAULT_SIZE];
- break;
- case TINYINT_TYPE:
- byteVars = new byte[DEFAULT_SIZE];
- break;
- case SMALLINT_TYPE:
- shortVars = new short[DEFAULT_SIZE];
- break;
- case INT_TYPE:
- intVars = new int[DEFAULT_SIZE];
- break;
- case BIGINT_TYPE:
- longVars = new long[DEFAULT_SIZE];
- break;
- case FLOAT_TYPE:
- case DOUBLE_TYPE:
- type = Type.DOUBLE_TYPE;
- doubleVars = new double[DEFAULT_SIZE];
- break;
- case BINARY_TYPE:
- binaryVars = new ArrayList<ByteBuffer>();
- break;
- default:
- type = Type.STRING_TYPE;
- stringVars = new ArrayList<String>();
- }
- this.type = type;
- }
-
- public Column(TColumn colValues) {
- if (colValues.isSetBoolVal()) {
- type = Type.BOOLEAN_TYPE;
- nulls = toBitset(colValues.getBoolVal().getNulls());
- boolVars = Booleans.toArray(colValues.getBoolVal().getValues());
- size = boolVars.length;
- } else if (colValues.isSetByteVal()) {
- type = Type.TINYINT_TYPE;
- nulls = toBitset(colValues.getByteVal().getNulls());
- byteVars = Bytes.toArray(colValues.getByteVal().getValues());
- size = byteVars.length;
- } else if (colValues.isSetI16Val()) {
- type = Type.SMALLINT_TYPE;
- nulls = toBitset(colValues.getI16Val().getNulls());
- shortVars = Shorts.toArray(colValues.getI16Val().getValues());
- size = shortVars.length;
- } else if (colValues.isSetI32Val()) {
- type = Type.INT_TYPE;
- nulls = toBitset(colValues.getI32Val().getNulls());
- intVars = Ints.toArray(colValues.getI32Val().getValues());
- size = intVars.length;
- } else if (colValues.isSetI64Val()) {
- type = Type.BIGINT_TYPE;
- nulls = toBitset(colValues.getI64Val().getNulls());
- longVars = Longs.toArray(colValues.getI64Val().getValues());
- size = longVars.length;
- } else if (colValues.isSetDoubleVal()) {
- type = Type.DOUBLE_TYPE;
- nulls = toBitset(colValues.getDoubleVal().getNulls());
- doubleVars = Doubles.toArray(colValues.getDoubleVal().getValues());
- size = doubleVars.length;
- } else if (colValues.isSetBinaryVal()) {
- type = Type.BINARY_TYPE;
- nulls = toBitset(colValues.getBinaryVal().getNulls());
- binaryVars = colValues.getBinaryVal().getValues();
- size = binaryVars.size();
- } else if (colValues.isSetStringVal()) {
- type = Type.STRING_TYPE;
- nulls = toBitset(colValues.getStringVal().getNulls());
- stringVars = colValues.getStringVal().getValues();
- size = stringVars.size();
- } else {
- throw new IllegalStateException("invalid union object");
- }
- }
-
- public Column extractSubset(int start, int end) {
- BitSet subNulls = nulls.get(start, end);
- if (type == Type.BOOLEAN_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(boolVars, start, end));
- boolVars = Arrays.copyOfRange(boolVars, end, size);
- nulls = nulls.get(start, size);
- size = boolVars.length;
- return subset;
- }
- if (type == Type.TINYINT_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(byteVars, start, end));
- byteVars = Arrays.copyOfRange(byteVars, end, size);
- nulls = nulls.get(start, size);
- size = byteVars.length;
- return subset;
- }
- if (type == Type.SMALLINT_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(shortVars, start, end));
- shortVars = Arrays.copyOfRange(shortVars, end, size);
- nulls = nulls.get(start, size);
- size = shortVars.length;
- return subset;
- }
- if (type == Type.INT_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(intVars, start, end));
- intVars = Arrays.copyOfRange(intVars, end, size);
- nulls = nulls.get(start, size);
- size = intVars.length;
- return subset;
- }
- if (type == Type.BIGINT_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(longVars, start, end));
- longVars = Arrays.copyOfRange(longVars, end, size);
- nulls = nulls.get(start, size);
- size = longVars.length;
- return subset;
- }
- if (type == Type.DOUBLE_TYPE) {
- Column subset = new Column(type, subNulls, Arrays.copyOfRange(doubleVars, start, end));
- doubleVars = Arrays.copyOfRange(doubleVars, end, size);
- nulls = nulls.get(start, size);
- size = doubleVars.length;
- return subset;
- }
- if (type == Type.BINARY_TYPE) {
- Column subset = new Column(type, subNulls, binaryVars.subList(start, end));
- binaryVars = binaryVars.subList(end, binaryVars.size());
- nulls = nulls.get(start, size);
- size = binaryVars.size();
- return subset;
- }
- if (type == Type.STRING_TYPE) {
- Column subset = new Column(type, subNulls, stringVars.subList(start, end));
- stringVars = stringVars.subList(end, stringVars.size());
- nulls = nulls.get(start, size);
- size = stringVars.size();
- return subset;
- }
- throw new IllegalStateException("invalid union object");
- }
-
- private static final byte[] MASKS = new byte[] {
- 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, (byte)0x80
- };
-
- private static BitSet toBitset(byte[] nulls) {
- BitSet bitset = new BitSet();
- int bits = nulls.length * 8;
- for (int i = 0; i < bits; i++) {
- bitset.set(i, (nulls[i / 8] & MASKS[i % 8]) != 0);
- }
- return bitset;
- }
-
- private static byte[] toBinary(BitSet bitset) {
- byte[] nulls = new byte[1 + (bitset.length() / 8)];
- for (int i = 0; i < bitset.length(); i++) {
- nulls[i / 8] |= bitset.get(i) ? MASKS[i % 8] : 0;
- }
- return nulls;
- }
-
- public Type getType() {
- return type;
- }
-
- @Override
- public Object get(int index) {
- if (nulls.get(index)) {
- return null;
- }
- switch (type) {
- case BOOLEAN_TYPE:
- return boolVars[index];
- case TINYINT_TYPE:
- return byteVars[index];
- case SMALLINT_TYPE:
- return shortVars[index];
- case INT_TYPE:
- return intVars[index];
- case BIGINT_TYPE:
- return longVars[index];
- case DOUBLE_TYPE:
- return doubleVars[index];
- case STRING_TYPE:
- return stringVars.get(index);
- case BINARY_TYPE:
- return binaryVars.get(index).array();
- }
- return null;
- }
-
- @Override
- public int size() {
- return size;
- }
-
- public TColumn toTColumn() {
- TColumn value = new TColumn();
- ByteBuffer nullMasks = ByteBuffer.wrap(toBinary(nulls));
- switch (type) {
- case BOOLEAN_TYPE:
- value.setBoolVal(new TBoolColumn(Booleans.asList(Arrays.copyOfRange(boolVars, 0, size)), nullMasks));
- break;
- case TINYINT_TYPE:
- value.setByteVal(new TByteColumn(Bytes.asList(Arrays.copyOfRange(byteVars, 0, size)), nullMasks));
- break;
- case SMALLINT_TYPE:
- value.setI16Val(new TI16Column(Shorts.asList(Arrays.copyOfRange(shortVars, 0, size)), nullMasks));
- break;
- case INT_TYPE:
- value.setI32Val(new TI32Column(Ints.asList(Arrays.copyOfRange(intVars, 0, size)), nullMasks));
- break;
- case BIGINT_TYPE:
- value.setI64Val(new TI64Column(Longs.asList(Arrays.copyOfRange(longVars, 0, size)), nullMasks));
- break;
- case DOUBLE_TYPE:
- value.setDoubleVal(new TDoubleColumn(Doubles.asList(Arrays.copyOfRange(doubleVars, 0, size)), nullMasks));
- break;
- case STRING_TYPE:
- value.setStringVal(new TStringColumn(stringVars, nullMasks));
- break;
- case BINARY_TYPE:
- value.setBinaryVal(new TBinaryColumn(binaryVars, nullMasks));
- break;
- }
- return value;
- }
-
- private static final ByteBuffer EMPTY_BINARY = ByteBuffer.allocate(0);
- private static final String EMPTY_STRING = "";
-
- public void addValue(TypeDescriptor typeDescriptor, Object field) {
- if (field != null && typeDescriptor.getType() == Type.DECIMAL_TYPE) {
- int scale = typeDescriptor.getDecimalDigits();
- field = ((HiveDecimal) field).toFormatString(scale);
- }
- addValue(typeDescriptor.getType(), field);
- }
-
- public void addValue(Type type, Object field) {
- switch (type) {
- case BOOLEAN_TYPE:
- nulls.set(size, field == null);
- boolVars()[size] = field == null ? true : (Boolean)field;
- break;
- case TINYINT_TYPE:
- nulls.set(size, field == null);
- byteVars()[size] = field == null ? 0 : (Byte) field;
- break;
- case SMALLINT_TYPE:
- nulls.set(size, field == null);
- shortVars()[size] = field == null ? 0 : (Short)field;
- break;
- case INT_TYPE:
- nulls.set(size, field == null);
- intVars()[size] = field == null ? 0 : (Integer)field;
- break;
- case BIGINT_TYPE:
- nulls.set(size, field == null);
- longVars()[size] = field == null ? 0 : (Long)field;
- break;
- case FLOAT_TYPE:
- nulls.set(size, field == null);
- doubleVars()[size] = field == null ? 0 : new Double(field.toString());
- break;
- case DOUBLE_TYPE:
- nulls.set(size, field == null);
- doubleVars()[size] = field == null ? 0 : (Double)field;
- break;
- case BINARY_TYPE:
- nulls.set(binaryVars.size(), field == null);
- binaryVars.add(field == null ? EMPTY_BINARY : ByteBuffer.wrap((byte[])field));
- break;
- default:
- nulls.set(stringVars.size(), field == null);
- stringVars.add(field == null ? EMPTY_STRING : String.valueOf(field));
- break;
- }
- size++;
- }
-
- private boolean[] boolVars() {
- if (boolVars.length == size) {
- boolean[] newVars = new boolean[size << 1];
- System.arraycopy(boolVars, 0, newVars, 0, size);
- return boolVars = newVars;
- }
- return boolVars;
- }
-
- private byte[] byteVars() {
- if (byteVars.length == size) {
- byte[] newVars = new byte[size << 1];
- System.arraycopy(byteVars, 0, newVars, 0, size);
- return byteVars = newVars;
- }
- return byteVars;
- }
-
- private short[] shortVars() {
- if (shortVars.length == size) {
- short[] newVars = new short[size << 1];
- System.arraycopy(shortVars, 0, newVars, 0, size);
- return shortVars = newVars;
- }
- return shortVars;
- }
-
- private int[] intVars() {
- if (intVars.length == size) {
- int[] newVars = new int[size << 1];
- System.arraycopy(intVars, 0, newVars, 0, size);
- return intVars = newVars;
- }
- return intVars;
- }
-
- private long[] longVars() {
- if (longVars.length == size) {
- long[] newVars = new long[size << 1];
- System.arraycopy(longVars, 0, newVars, 0, size);
- return longVars = newVars;
- }
- return longVars;
- }
-
- private double[] doubleVars() {
- if (doubleVars.length == size) {
- double[] newVars = new double[size << 1];
- System.arraycopy(doubleVars, 0, newVars, 0, size);
- return doubleVars = newVars;
- }
- return doubleVars;
- }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java b/service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
index b7fe663..9cbe89c 100644
--- a/service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
+++ b/service/src/java/org/apache/hive/service/cli/ColumnBasedSet.java
@@ -18,13 +18,24 @@

  package org.apache.hive.service.cli;

+import java.io.ByteArrayInputStream;
  import java.util.ArrayList;
  import java.util.Iterator;
  import java.util.List;

+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.serde2.thrift.ColumnBuffer;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.rpc.thrift.TColumn;
  import org.apache.hive.service.rpc.thrift.TRow;
  import org.apache.hive.service.rpc.thrift.TRowSet;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TIOStreamTransport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+

  /**
   * ColumnBasedSet.
@@ -34,40 +45,79 @@ public class ColumnBasedSet implements RowSet {
    private long startOffset;

    private final TypeDescriptor[] descriptors; // non-null only for writing (server-side)
- private final List<Column> columns;
+ private final List<ColumnBuffer> columns;
+ private byte[] blob;
+ private boolean isBlobBased = false;
+ public static final Logger LOG = LoggerFactory.getLogger(ColumnBasedSet.class);

    public ColumnBasedSet(TableSchema schema) {
      descriptors = schema.toTypeDescriptors();
- columns = new ArrayList<Column>();
+ columns = new ArrayList<ColumnBuffer>();
      for (ColumnDescriptor colDesc : schema.getColumnDescriptors()) {
- columns.add(new Column(colDesc.getType()));
+ columns.add(new ColumnBuffer(colDesc.getType()));
      }
    }

- public ColumnBasedSet(TRowSet tRowSet) {
+ public ColumnBasedSet(TRowSet tRowSet) throws TException {
      descriptors = null;
- columns = new ArrayList<Column>();
- for (TColumn tvalue : tRowSet.getColumns()) {
- columns.add(new Column(tvalue));
+ columns = new ArrayList<ColumnBuffer>();
+ // Use TCompactProtocol to read serialized TColumns
+ if (tRowSet.isSetBinaryColumns()) {
+ TProtocol protocol =
+ new TCompactProtocol(new TIOStreamTransport(new ByteArrayInputStream(
+ tRowSet.getBinaryColumns())));
+ // Read from the stream using the protocol for each column in final schema
+ for (int i = 0; i < tRowSet.getColumnCount(); i++) {
+ TColumn tvalue = new TColumn();
+ try {
+ tvalue.read(protocol);
+ } catch (TException e) {
+ LOG.error(e.getMessage(), e);
+ throw new TException("Error reading column value from the row set blob", e);
+ }
+ columns.add(new ColumnBuffer(tvalue));
+ }
+ }
+ else {
+ if (tRowSet.getColumns() != null) {
+ for (TColumn tvalue : tRowSet.getColumns()) {
+ columns.add(new ColumnBuffer(tvalue));
+ }
+ }
      }
      startOffset = tRowSet.getStartRowOffset();
    }

- private ColumnBasedSet(TypeDescriptor[] descriptors, List<Column> columns, long startOffset) {
+ private ColumnBasedSet(TypeDescriptor[] descriptors, List<ColumnBuffer> columns, long startOffset) {
      this.descriptors = descriptors;
      this.columns = columns;
      this.startOffset = startOffset;
    }

+ public ColumnBasedSet(TableSchema schema, boolean isBlobBased) {
+ this(schema);
+ this.isBlobBased = isBlobBased;
+ }
+
    @Override
    public ColumnBasedSet addRow(Object[] fields) {
- for (int i = 0; i < fields.length; i++) {
- columns.get(i).addValue(descriptors[i], fields[i]);
+ if (isBlobBased) {
+ this.blob = (byte[]) fields[0];
+ } else {
+ for (int i = 0; i < fields.length; i++) {
+ TypeDescriptor descriptor = descriptors[i];
+ Object field = fields[i];
+ if (field != null && descriptor.getType() == Type.DECIMAL_TYPE) {
+ int scale = descriptor.getDecimalDigits();
+ field = ((HiveDecimal) field).toFormatString(scale);
+ }
+ columns.get(i).addValue(descriptor.getType(), field);
+ }
      }
      return this;
    }

- public List<Column> getColumns() {
+ public List<ColumnBuffer> getColumns() {
      return columns;
    }

@@ -85,7 +135,7 @@ public class ColumnBasedSet implements RowSet {
    public ColumnBasedSet extractSubset(int maxRows) {
      int numRows = Math.min(numRows(), maxRows);

- List<Column> subset = new ArrayList<Column>();
+ List<ColumnBuffer> subset = new ArrayList<ColumnBuffer>();
      for (int i = 0; i < columns.size(); i++) {
        subset.add(columns.get(i).extractSubset(0, numRows));
      }
@@ -106,8 +156,14 @@ public class ColumnBasedSet implements RowSet {

    public TRowSet toTRowSet() {
      TRowSet tRowSet = new TRowSet(startOffset, new ArrayList<TRow>());
- for (int i = 0; i < columns.size(); i++) {
- tRowSet.addToColumns(columns.get(i).toTColumn());
+ if (isBlobBased) {
+ tRowSet.setColumns(null);
+ tRowSet.setBinaryColumns(blob);
+ tRowSet.setColumnCount(numColumns());
+ } else {
+ for (int i = 0; i < columns.size(); i++) {
+ tRowSet.addToColumns(columns.get(i).toTColumn());
+ }
      }
      return tRowSet;
    }

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java b/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
index 7bd9f06..bfd7135 100644
--- a/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
+++ b/service/src/java/org/apache/hive/service/cli/ColumnDescriptor.java
@@ -18,7 +18,7 @@

  package org.apache.hive.service.cli;

-import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.rpc.thrift.TColumnDesc;


@@ -47,14 +47,8 @@ public class ColumnDescriptor {
      position = tColumnDesc.getPosition();
    }

- public ColumnDescriptor(FieldSchema column, int position) {
- name = column.getName();
- comment = column.getComment();
- type = new TypeDescriptor(column.getType());
- this.position = position;
- }
-
- public static ColumnDescriptor newPrimitiveColumnDescriptor(String name, String comment, Type type, int position) {
+ public static ColumnDescriptor newPrimitiveColumnDescriptor(String name, String comment,
+ Type type, int position) {
      // Current usage looks like it's only for metadata columns, but if that changes then
      // this method may need to require a type qualifiers aruments.
      return new ColumnDescriptor(name, comment, new TypeDescriptor(type), position);

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/ColumnValue.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/ColumnValue.java b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
index 37460e6..28149e1 100644
--- a/service/src/java/org/apache/hive/service/cli/ColumnValue.java
+++ b/service/src/java/org/apache/hive/service/cli/ColumnValue.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
  import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
  import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
  import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.rpc.thrift.TBoolValue;
  import org.apache.hive.service.rpc.thrift.TByteValue;
  import org.apache.hive.service.rpc.thrift.TColumnValue;

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/RowSetFactory.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/RowSetFactory.java b/service/src/java/org/apache/hive/service/cli/RowSetFactory.java
index 1c2ff7f..d9be6a0 100644
--- a/service/src/java/org/apache/hive/service/cli/RowSetFactory.java
+++ b/service/src/java/org/apache/hive/service/cli/RowSetFactory.java
@@ -20,22 +20,25 @@ package org.apache.hive.service.cli;

  import org.apache.hive.service.rpc.thrift.TProtocolVersion;
  import org.apache.hive.service.rpc.thrift.TRowSet;
+import org.apache.thrift.TException;

  import static org.apache.hive.service.rpc.thrift.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6;

  public class RowSetFactory {

- public static RowSet create(TableSchema schema, TProtocolVersion version) {
+ // This call is accessed from server side
+ public static RowSet create(TableSchema schema, TProtocolVersion version, boolean isBlobBased) {
      if (version.getValue() >= HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
- return new ColumnBasedSet(schema);
+ return new ColumnBasedSet(schema, isBlobBased);
      }
      return new RowBasedSet(schema);
    }

- public static RowSet create(TRowSet results, TProtocolVersion version) {
- if (version.getValue() >= HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
- return new ColumnBasedSet(results);
- }
- return new RowBasedSet(results);
+ // This call is accessed from client (jdbc) side
+ public static RowSet create(TRowSet results, TProtocolVersion version) throws TException {
+ if (version.getValue() >= HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
+ return new ColumnBasedSet(results);
+ }
+ return new RowBasedSet(results);
    }
  }

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/TableSchema.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/TableSchema.java b/service/src/java/org/apache/hive/service/cli/TableSchema.java
index 2206e2c..f5eda8a 100644
--- a/service/src/java/org/apache/hive/service/cli/TableSchema.java
+++ b/service/src/java/org/apache/hive/service/cli/TableSchema.java
@@ -23,6 +23,7 @@ import java.util.List;

  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.rpc.thrift.TColumnDesc;
  import org.apache.hive.service.rpc.thrift.TTableSchema;

@@ -49,7 +50,8 @@ public class TableSchema {
    public TableSchema(List<FieldSchema> fieldSchemas) {
      int pos = 1;
      for (FieldSchema field : fieldSchemas) {
- columns.add(new ColumnDescriptor(field, pos++));
+ columns.add(new ColumnDescriptor(field.getName(), field.getComment(), new TypeDescriptor(
+ field.getType()), pos++));
      }
    }


http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/Type.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/Type.java b/service/src/java/org/apache/hive/service/cli/Type.java
deleted file mode 100644
index f067b3d..0000000
--- a/service/src/java/org/apache/hive/service/cli/Type.java
+++ /dev/null
@@ -1,348 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hive.service.cli;
-
-import java.sql.DatabaseMetaData;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hive.service.rpc.thrift.TTypeId;
-
-/**
- * Type.
- *
- */
-public enum Type {
- NULL_TYPE("VOID",
- java.sql.Types.NULL,
- TTypeId.NULL_TYPE),
- BOOLEAN_TYPE("BOOLEAN",
- java.sql.Types.BOOLEAN,
- TTypeId.BOOLEAN_TYPE),
- TINYINT_TYPE("TINYINT",
- java.sql.Types.TINYINT,
- TTypeId.TINYINT_TYPE),
- SMALLINT_TYPE("SMALLINT",
- java.sql.Types.SMALLINT,
- TTypeId.SMALLINT_TYPE),
- INT_TYPE("INT",
- java.sql.Types.INTEGER,
- TTypeId.INT_TYPE),
- BIGINT_TYPE("BIGINT",
- java.sql.Types.BIGINT,
- TTypeId.BIGINT_TYPE),
- FLOAT_TYPE("FLOAT",
- java.sql.Types.FLOAT,
- TTypeId.FLOAT_TYPE),
- DOUBLE_TYPE("DOUBLE",
- java.sql.Types.DOUBLE,
- TTypeId.DOUBLE_TYPE),
- STRING_TYPE("STRING",
- java.sql.Types.VARCHAR,
- TTypeId.STRING_TYPE),
- CHAR_TYPE("CHAR",
- java.sql.Types.CHAR,
- TTypeId.CHAR_TYPE,
- true, false, false),
- VARCHAR_TYPE("VARCHAR",
- java.sql.Types.VARCHAR,
- TTypeId.VARCHAR_TYPE,
- true, false, false),
- DATE_TYPE("DATE",
- java.sql.Types.DATE,
- TTypeId.DATE_TYPE),
- TIMESTAMP_TYPE("TIMESTAMP",
- java.sql.Types.TIMESTAMP,
- TTypeId.TIMESTAMP_TYPE),
- INTERVAL_YEAR_MONTH_TYPE("INTERVAL_YEAR_MONTH",
- java.sql.Types.OTHER,
- TTypeId.INTERVAL_YEAR_MONTH_TYPE),
- INTERVAL_DAY_TIME_TYPE("INTERVAL_DAY_TIME",
- java.sql.Types.OTHER,
- TTypeId.INTERVAL_DAY_TIME_TYPE),
- BINARY_TYPE("BINARY",
- java.sql.Types.BINARY,
- TTypeId.BINARY_TYPE),
- DECIMAL_TYPE("DECIMAL",
- java.sql.Types.DECIMAL,
- TTypeId.DECIMAL_TYPE,
- true, false, false),
- ARRAY_TYPE("ARRAY",
- java.sql.Types.ARRAY,
- TTypeId.ARRAY_TYPE,
- true, true),
- MAP_TYPE("MAP",
- java.sql.Types.JAVA_OBJECT,
- TTypeId.MAP_TYPE,
- true, true),
- STRUCT_TYPE("STRUCT",
- java.sql.Types.STRUCT,
- TTypeId.STRUCT_TYPE,
- true, false),
- UNION_TYPE("UNIONTYPE",
- java.sql.Types.OTHER,
- TTypeId.UNION_TYPE,
- true, false),
- USER_DEFINED_TYPE("USER_DEFINED",
- java.sql.Types.OTHER,
- TTypeId.USER_DEFINED_TYPE,
- true, false);
-
- private final String name;
- private final TTypeId tType;
- private final int javaSQLType;
- private final boolean isQualified;
- private final boolean isComplex;
- private final boolean isCollection;
-
- Type(String name, int javaSQLType, TTypeId tType, boolean isQualified, boolean isComplex, boolean isCollection) {
- this.name = name;
- this.javaSQLType = javaSQLType;
- this.tType = tType;
- this.isQualified = isQualified;
- this.isComplex = isComplex;
- this.isCollection = isCollection;
- }
-
- Type(String name, int javaSQLType, TTypeId tType, boolean isComplex, boolean isCollection) {
- this(name, javaSQLType, tType, false, isComplex, isCollection);
- }
-
- Type(String name, int javaSqlType, TTypeId tType) {
- this(name, javaSqlType, tType, false, false, false);
- }
-
- public boolean isPrimitiveType() {
- return !isComplex;
- }
-
- public boolean isQualifiedType() {
- return isQualified;
- }
-
- public boolean isComplexType() {
- return isComplex;
- }
-
- public boolean isCollectionType() {
- return isCollection;
- }
-
- public static Type getType(TTypeId tType) {
- for (Type type : values()) {
- if (tType.equals(type.tType)) {
- return type;
- }
- }
- throw new IllegalArgumentException("Unrecognized Thrift TTypeId value: " + tType);
- }
-
- public static Type getType(String name) {
- if (name == null) {
- throw new IllegalArgumentException("Invalid type name: null");
- }
- for (Type type : values()) {
- if (name.equalsIgnoreCase(type.name)) {
- return type;
- } else if (type.isQualifiedType() || type.isComplexType()) {
- if (name.toUpperCase().startsWith(type.name)) {
- return type;
- }
- }
- }
- throw new IllegalArgumentException("Unrecognized type name: " + name);
- }
-
- /**
- * Radix for this type (typically either 2 or 10)
- * Null is returned for data types where this is not applicable.
- */
- public Integer getNumPrecRadix() {
- if (this.isNumericType()) {
- return 10;
- }
- return null;
- }
-
- /**
- * Maximum precision for numeric types.
- * Returns null for non-numeric types.
- * @return
- */
- public Integer getMaxPrecision() {
- switch (this) {
- case TINYINT_TYPE:
- return 3;
- case SMALLINT_TYPE:
- return 5;
- case INT_TYPE:
- return 10;
- case BIGINT_TYPE:
- return 19;
- case FLOAT_TYPE:
- return 7;
- case DOUBLE_TYPE:
- return 15;
- case DECIMAL_TYPE:
- return HiveDecimal.MAX_PRECISION;
- default:
- return null;
- }
- }
-
- public boolean isNumericType() {
- switch (this) {
- case TINYINT_TYPE:
- case SMALLINT_TYPE:
- case INT_TYPE:
- case BIGINT_TYPE:
- case FLOAT_TYPE:
- case DOUBLE_TYPE:
- case DECIMAL_TYPE:
- return true;
- default:
- return false;
- }
- }
-
- /**
- * Prefix used to quote a literal of this type (may be null)
- */
- public String getLiteralPrefix() {
- return null;
- }
-
- /**
- * Suffix used to quote a literal of this type (may be null)
- * @return
- */
- public String getLiteralSuffix() {
- return null;
- }
-
- /**
- * Can you use NULL for this type?
- * @return
- * DatabaseMetaData.typeNoNulls - does not allow NULL values
- * DatabaseMetaData.typeNullable - allows NULL values
- * DatabaseMetaData.typeNullableUnknown - nullability unknown
- */
- public Short getNullable() {
- // All Hive types are nullable
- return DatabaseMetaData.typeNullable;
- }
-
- /**
- * Is the type case sensitive?
- * @return
- */
- public Boolean isCaseSensitive() {
- switch (this) {
- case STRING_TYPE:
- return true;
- default:
- return false;
- }
- }
-
- /**
- * Parameters used in creating the type (may be null)
- * @return
- */
- public String getCreateParams() {
- return null;
- }
-
- /**
- * Can you use WHERE based on this type?
- * @return
- * DatabaseMetaData.typePredNone - No support
- * DatabaseMetaData.typePredChar - Only support with WHERE .. LIKE
- * DatabaseMetaData.typePredBasic - Supported except for WHERE .. LIKE
- * DatabaseMetaData.typeSearchable - Supported for all WHERE ..
- */
- public Short getSearchable() {
- if (isPrimitiveType()) {
- return DatabaseMetaData.typeSearchable;
- }
- return DatabaseMetaData.typePredNone;
- }
-
- /**
- * Is this type unsigned?
- * @return
- */
- public Boolean isUnsignedAttribute() {
- if (isNumericType()) {
- return false;
- }
- return true;
- }
-
- /**
- * Can this type represent money?
- * @return
- */
- public Boolean isFixedPrecScale() {
- return false;
- }
-
- /**
- * Can this type be used for an auto-increment value?
- * @return
- */
- public Boolean isAutoIncrement() {
- return false;
- }
-
- /**
- * Localized version of type name (may be null).
- * @return
- */
- public String getLocalizedName() {
- return null;
- }
-
- /**
- * Minimum scale supported for this type
- * @return
- */
- public Short getMinimumScale() {
- return 0;
- }
-
- /**
- * Maximum scale supported for this type
- * @return
- */
- public Short getMaximumScale() {
- return 0;
- }
-
- public TTypeId toTType() {
- return tType;
- }
-
- public int toJavaSQLType() {
- return javaSQLType;
- }
-
- public String getName() {
- return name;
- }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
index b4a5b77..d634bef 100644
--- a/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
+++ b/service/src/java/org/apache/hive/service/cli/TypeDescriptor.java
@@ -20,6 +20,7 @@ package org.apache.hive.service.cli;

  import java.util.List;

+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  import org.apache.hive.service.rpc.thrift.TPrimitiveTypeEntry;

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
index 2eeee47..3dd33f2 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
@@ -43,7 +43,7 @@ public class GetCatalogsOperation extends MetadataOperation {

    protected GetCatalogsOperation(HiveSession parentSession) {
      super(parentSession, OperationType.GET_CATALOGS);
- rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
index 574a757..c075179 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
  import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.cli.ColumnDescriptor;
  import org.apache.hive.service.cli.FetchOrientation;
  import org.apache.hive.service.cli.HiveSQLException;
@@ -44,7 +45,6 @@ import org.apache.hive.service.cli.OperationType;
  import org.apache.hive.service.cli.RowSet;
  import org.apache.hive.service.cli.RowSetFactory;
  import org.apache.hive.service.cli.TableSchema;
-import org.apache.hive.service.cli.Type;
  import org.apache.hive.service.cli.session.HiveSession;

  /**
@@ -122,7 +122,7 @@ public class GetColumnsOperation extends MetadataOperation {
      this.schemaName = schemaName;
      this.tableName = tableName;
      this.columnName = columnName;
- this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
index d774f4f95..35b2e63 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObjectUtils;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.cli.CLIServiceUtils;
  import org.apache.hive.service.cli.FetchOrientation;
  import org.apache.hive.service.cli.HiveSQLException;
@@ -38,7 +39,6 @@ import org.apache.hive.service.cli.OperationType;
  import org.apache.hive.service.cli.RowSet;
  import org.apache.hive.service.cli.RowSetFactory;
  import org.apache.hive.service.cli.TableSchema;
-import org.apache.hive.service.cli.Type;
  import org.apache.hive.service.cli.session.HiveSession;
  import org.apache.thrift.TException;

@@ -67,13 +67,13 @@ public class GetFunctionsOperation extends MetadataOperation {

    private final RowSet rowSet;

- public GetFunctionsOperation(HiveSession parentSession,
- String catalogName, String schemaName, String functionName) {
+ public GetFunctionsOperation(HiveSession parentSession, String catalogName, String schemaName,
+ String functionName) {
      super(parentSession, OperationType.GET_FUNCTIONS);
      this.catalogName = catalogName;
      this.schemaName = schemaName;
      this.functionName = functionName;
- this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
index dc0a3dd..6013a3f 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
@@ -46,12 +46,11 @@ public class GetSchemasOperation extends MetadataOperation {

    private RowSet rowSet;

- protected GetSchemasOperation(HiveSession parentSession,
- String catalogName, String schemaName) {
+ protected GetSchemasOperation(HiveSession parentSession, String catalogName, String schemaName) {
      super(parentSession, OperationType.GET_SCHEMAS);
      this.catalogName = catalogName;
      this.schemaName = schemaName;
- this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
index 13d5b37..874435b 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
@@ -47,11 +47,10 @@ public class GetTableTypesOperation extends MetadataOperation {

    protected GetTableTypesOperation(HiveSession parentSession) {
      super(parentSession, OperationType.GET_TABLE_TYPES);
- String tableMappingStr = getParentSession().getHiveConf().
- getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
- tableTypeMapping =
- TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
- rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ String tableMappingStr =
+ getParentSession().getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_TABLE_TYPE_MAPPING);
+ tableTypeMapping = TableTypeMappingFactory.getTableTypeMapping(tableMappingStr);
+ rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
index aac3692..68d093a 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
@@ -84,7 +84,7 @@ public class GetTablesOperation extends MetadataOperation {
      } else {
        tableTypeList = null;
      }
- this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java b/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
index 53660af..db19024 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
  import java.util.Arrays;

  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.apache.hive.service.cli.FetchOrientation;
  import org.apache.hive.service.cli.HiveSQLException;
  import org.apache.hive.service.cli.OperationState;
@@ -29,7 +30,6 @@ import org.apache.hive.service.cli.OperationType;
  import org.apache.hive.service.cli.RowSet;
  import org.apache.hive.service.cli.RowSetFactory;
  import org.apache.hive.service.cli.TableSchema;
-import org.apache.hive.service.cli.Type;
  import org.apache.hive.service.cli.session.HiveSession;

  /**
@@ -80,7 +80,7 @@ public class GetTypeInfoOperation extends MetadataOperation {

    protected GetTypeInfoOperation(HiveSession parentSession) {
      super(parentSession, OperationType.GET_TYPE_INFO);
- rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
+ rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion(), false);
    }

    @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
index f5a9771..f18dc67 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
@@ -173,10 +173,10 @@ public class HiveCommandOperation extends ExecuteStatementOperation {
        resetResultReader();
      }
      List<String> rows = readResults((int) maxRows);
- RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
+ RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), false);

      for (String row : rows) {
- rowSet.addRow(new String[] {row});
+ rowSet.addRow(new String[] { row });
      }
      return rowSet;
    }

Search Discussions

Discussion Posts

Follow ups

Related Discussions

Discussion Navigation
view thread | post
posts ‹ prev | 1 of 3 | next ›
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Apr 22, '16 at 7:23p
active: Apr 22, '16 at 7:23p
posts: 3
users: 1
website: hive.apache.org

1 user in discussion

Vgumashta: 3 posts

People

Translate

site design / logo © 2021 Grokbase