FAQ
Author: jdere
Date: Tue Apr 14 20:47:29 2015
New Revision: 1673553

URL: http://svn.apache.org/r1673553
Log:
HIVE-10226: Column stats for Date columns not supported (Jason Dere, reviewed by Ashutosh Chauhan)

Added:
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
     hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q
     hive/trunk/ql/src/test/results/clientpositive/compute_stats_date.q.out
Modified:
     hive/trunk/metastore/if/hive_metastore.thrift
     hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
     hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
     hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php
     hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
     hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
     hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
     hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java
     hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java

Modified: hive/trunk/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/if/hive_metastore.thrift?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hive/trunk/metastore/if/hive_metastore.thrift Tue Apr 14 20:47:29 2015
@@ -368,13 +368,25 @@ struct DecimalColumnStatsData {
  4: required i64 numDVs
  }

+struct Date {
+1: required i64 daysSinceEpoch
+}
+
+struct DateColumnStatsData {
+1: optional Date lowValue,
+2: optional Date highValue,
+3: required i64 numNulls,
+4: required i64 numDVs
+}
+
  union ColumnStatisticsData {
  1: BooleanColumnStatsData booleanStats,
  2: LongColumnStatsData longStats,
  3: DoubleColumnStatsData doubleStats,
  4: StringColumnStatsData stringStats,
  5: BinaryColumnStatsData binaryStats,
-6: DecimalColumnStatsData decimalStats
+6: DecimalColumnStatsData decimalStats,
+7: DateColumnStatsData dateStats
  }

  struct ColumnStatisticsObj {

Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp Tue Apr 14 20:47:29 2015
@@ -4697,8 +4697,181 @@ void swap(DecimalColumnStatsData &a, Dec
    swap(a.__isset, b.__isset);
  }

-const char* ColumnStatisticsData::ascii_fingerprint = "D079ACEA6EE0998D0A45CB65FF1EAADD";
-const uint8_t ColumnStatisticsData::binary_fingerprint[16] = {0xD0,0x79,0xAC,0xEA,0x6E,0xE0,0x99,0x8D,0x0A,0x45,0xCB,0x65,0xFF,0x1E,0xAA,0xDD};
+const char* Date::ascii_fingerprint = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
+const uint8_t Date::binary_fingerprint[16] = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
+
+uint32_t Date::read(::apache::thrift::protocol::TProtocol* iprot) {
+
+ uint32_t xfer = 0;
+ std::string fname;
+ ::apache::thrift::protocol::TType ftype;
+ int16_t fid;
+
+ xfer += iprot->readStructBegin(fname);
+
+ using ::apache::thrift::protocol::TProtocolException;
+
+ bool isset_daysSinceEpoch = false;
+
+ while (true)
+ {
+ xfer += iprot->readFieldBegin(fname, ftype, fid);
+ if (ftype == ::apache::thrift::protocol::T_STOP) {
+ break;
+ }
+ switch (fid)
+ {
+ case 1:
+ if (ftype == ::apache::thrift::protocol::T_I64) {
+ xfer += iprot->readI64(this->daysSinceEpoch);
+ isset_daysSinceEpoch = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ default:
+ xfer += iprot->skip(ftype);
+ break;
+ }
+ xfer += iprot->readFieldEnd();
+ }
+
+ xfer += iprot->readStructEnd();
+
+ if (!isset_daysSinceEpoch)
+ throw TProtocolException(TProtocolException::INVALID_DATA);
+ return xfer;
+}
+
+uint32_t Date::write(::apache::thrift::protocol::TProtocol* oprot) const {
+ uint32_t xfer = 0;
+ xfer += oprot->writeStructBegin("Date");
+
+ xfer += oprot->writeFieldBegin("daysSinceEpoch", ::apache::thrift::protocol::T_I64, 1);
+ xfer += oprot->writeI64(this->daysSinceEpoch);
+ xfer += oprot->writeFieldEnd();
+
+ xfer += oprot->writeFieldStop();
+ xfer += oprot->writeStructEnd();
+ return xfer;
+}
+
+void swap(Date &a, Date &b) {
+ using ::std::swap;
+ swap(a.daysSinceEpoch, b.daysSinceEpoch);
+}
+
+const char* DateColumnStatsData::ascii_fingerprint = "D0719F3BBA8248297BB5287552897F59";
+const uint8_t DateColumnStatsData::binary_fingerprint[16] = {0xD0,0x71,0x9F,0x3B,0xBA,0x82,0x48,0x29,0x7B,0xB5,0x28,0x75,0x52,0x89,0x7F,0x59};
+
+uint32_t DateColumnStatsData::read(::apache::thrift::protocol::TProtocol* iprot) {
+
+ uint32_t xfer = 0;
+ std::string fname;
+ ::apache::thrift::protocol::TType ftype;
+ int16_t fid;
+
+ xfer += iprot->readStructBegin(fname);
+
+ using ::apache::thrift::protocol::TProtocolException;
+
+ bool isset_numNulls = false;
+ bool isset_numDVs = false;
+
+ while (true)
+ {
+ xfer += iprot->readFieldBegin(fname, ftype, fid);
+ if (ftype == ::apache::thrift::protocol::T_STOP) {
+ break;
+ }
+ switch (fid)
+ {
+ case 1:
+ if (ftype == ::apache::thrift::protocol::T_STRUCT) {
+ xfer += this->lowValue.read(iprot);
+ this->__isset.lowValue = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 2:
+ if (ftype == ::apache::thrift::protocol::T_STRUCT) {
+ xfer += this->highValue.read(iprot);
+ this->__isset.highValue = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 3:
+ if (ftype == ::apache::thrift::protocol::T_I64) {
+ xfer += iprot->readI64(this->numNulls);
+ isset_numNulls = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ case 4:
+ if (ftype == ::apache::thrift::protocol::T_I64) {
+ xfer += iprot->readI64(this->numDVs);
+ isset_numDVs = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
+ default:
+ xfer += iprot->skip(ftype);
+ break;
+ }
+ xfer += iprot->readFieldEnd();
+ }
+
+ xfer += iprot->readStructEnd();
+
+ if (!isset_numNulls)
+ throw TProtocolException(TProtocolException::INVALID_DATA);
+ if (!isset_numDVs)
+ throw TProtocolException(TProtocolException::INVALID_DATA);
+ return xfer;
+}
+
+uint32_t DateColumnStatsData::write(::apache::thrift::protocol::TProtocol* oprot) const {
+ uint32_t xfer = 0;
+ xfer += oprot->writeStructBegin("DateColumnStatsData");
+
+ if (this->__isset.lowValue) {
+ xfer += oprot->writeFieldBegin("lowValue", ::apache::thrift::protocol::T_STRUCT, 1);
+ xfer += this->lowValue.write(oprot);
+ xfer += oprot->writeFieldEnd();
+ }
+ if (this->__isset.highValue) {
+ xfer += oprot->writeFieldBegin("highValue", ::apache::thrift::protocol::T_STRUCT, 2);
+ xfer += this->highValue.write(oprot);
+ xfer += oprot->writeFieldEnd();
+ }
+ xfer += oprot->writeFieldBegin("numNulls", ::apache::thrift::protocol::T_I64, 3);
+ xfer += oprot->writeI64(this->numNulls);
+ xfer += oprot->writeFieldEnd();
+
+ xfer += oprot->writeFieldBegin("numDVs", ::apache::thrift::protocol::T_I64, 4);
+ xfer += oprot->writeI64(this->numDVs);
+ xfer += oprot->writeFieldEnd();
+
+ xfer += oprot->writeFieldStop();
+ xfer += oprot->writeStructEnd();
+ return xfer;
+}
+
+void swap(DateColumnStatsData &a, DateColumnStatsData &b) {
+ using ::std::swap;
+ swap(a.lowValue, b.lowValue);
+ swap(a.highValue, b.highValue);
+ swap(a.numNulls, b.numNulls);
+ swap(a.numDVs, b.numDVs);
+ swap(a.__isset, b.__isset);
+}
+
+const char* ColumnStatisticsData::ascii_fingerprint = "15E449CA15A23E37F2D54C31ACA52106";
+const uint8_t ColumnStatisticsData::binary_fingerprint[16] = {0x15,0xE4,0x49,0xCA,0x15,0xA2,0x3E,0x37,0xF2,0xD5,0x4C,0x31,0xAC,0xA5,0x21,0x06};

  uint32_t ColumnStatisticsData::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -4768,6 +4941,14 @@ uint32_t ColumnStatisticsData::read(::ap
            xfer += iprot->skip(ftype);
          }
          break;
+ case 7:
+ if (ftype == ::apache::thrift::protocol::T_STRUCT) {
+ xfer += this->dateStats.read(iprot);
+ this->__isset.dateStats = true;
+ } else {
+ xfer += iprot->skip(ftype);
+ }
+ break;
        default:
          xfer += iprot->skip(ftype);
          break;
@@ -4808,6 +4989,10 @@ uint32_t ColumnStatisticsData::write(::a
    xfer += this->decimalStats.write(oprot);
    xfer += oprot->writeFieldEnd();

+ xfer += oprot->writeFieldBegin("dateStats", ::apache::thrift::protocol::T_STRUCT, 7);
+ xfer += this->dateStats.write(oprot);
+ xfer += oprot->writeFieldEnd();
+
    xfer += oprot->writeFieldStop();
    xfer += oprot->writeStructEnd();
    return xfer;
@@ -4821,11 +5006,12 @@ void swap(ColumnStatisticsData &a, Colum
    swap(a.stringStats, b.stringStats);
    swap(a.binaryStats, b.binaryStats);
    swap(a.decimalStats, b.decimalStats);
+ swap(a.dateStats, b.dateStats);
    swap(a.__isset, b.__isset);
  }

-const char* ColumnStatisticsObj::ascii_fingerprint = "E49E62CFC71682004614EFEDAC3CD3F4";
-const uint8_t ColumnStatisticsObj::binary_fingerprint[16] = {0xE4,0x9E,0x62,0xCF,0xC7,0x16,0x82,0x00,0x46,0x14,0xEF,0xED,0xAC,0x3C,0xD3,0xF4};
+const char* ColumnStatisticsObj::ascii_fingerprint = "A82BA819C9FFEDDEBC0D50F6F4E46534";
+const uint8_t ColumnStatisticsObj::binary_fingerprint[16] = {0xA8,0x2B,0xA8,0x19,0xC9,0xFF,0xED,0xDE,0xBC,0x0D,0x50,0xF6,0xF4,0xE4,0x65,0x34};

  uint32_t ColumnStatisticsObj::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -5045,8 +5231,8 @@ void swap(ColumnStatisticsDesc &a, Colum
    swap(a.__isset, b.__isset);
  }

-const char* ColumnStatistics::ascii_fingerprint = "6682E234199B2CD3807B1ED420C6A7F8";
-const uint8_t ColumnStatistics::binary_fingerprint[16] = {0x66,0x82,0xE2,0x34,0x19,0x9B,0x2C,0xD3,0x80,0x7B,0x1E,0xD4,0x20,0xC6,0xA7,0xF8};
+const char* ColumnStatistics::ascii_fingerprint = "8A64D0A67FFD3A372726A320B3913D5A";
+const uint8_t ColumnStatistics::binary_fingerprint[16] = {0x8A,0x64,0xD0,0xA6,0x7F,0xFD,0x3A,0x37,0x27,0x26,0xA3,0x20,0xB3,0x91,0x3D,0x5A};

  uint32_t ColumnStatistics::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -5145,8 +5331,8 @@ void swap(ColumnStatistics &a, ColumnSta
    swap(a.statsObj, b.statsObj);
  }

-const char* AggrStats::ascii_fingerprint = "399BDBAF7503E0BFB5E1D99C83D790CD";
-const uint8_t AggrStats::binary_fingerprint[16] = {0x39,0x9B,0xDB,0xAF,0x75,0x03,0xE0,0xBF,0xB5,0xE1,0xD9,0x9C,0x83,0xD7,0x90,0xCD};
+const char* AggrStats::ascii_fingerprint = "1FC765A3AB2954591C14B0D946778213";
+const uint8_t AggrStats::binary_fingerprint[16] = {0x1F,0xC7,0x65,0xA3,0xAB,0x29,0x54,0x59,0x1C,0x14,0xB0,0xD9,0x46,0x77,0x82,0x13};

  uint32_t AggrStats::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -5245,8 +5431,8 @@ void swap(AggrStats &a, AggrStats &b) {
    swap(a.partsFound, b.partsFound);
  }

-const char* SetPartitionsStatsRequest::ascii_fingerprint = "635C0DA9A947DA57AAE693A5DFB86569";
-const uint8_t SetPartitionsStatsRequest::binary_fingerprint[16] = {0x63,0x5C,0x0D,0xA9,0xA9,0x47,0xDA,0x57,0xAA,0xE6,0x93,0xA5,0xDF,0xB8,0x65,0x69};
+const char* SetPartitionsStatsRequest::ascii_fingerprint = "8BCBF2CE9E942D0FB7D8F4ED57D8DC27";
+const uint8_t SetPartitionsStatsRequest::binary_fingerprint[16] = {0x8B,0xCB,0xF2,0xCE,0x9E,0x94,0x2D,0x0F,0xB7,0xD8,0xF4,0xED,0x57,0xD8,0xDC,0x27};

  uint32_t SetPartitionsStatsRequest::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -5759,8 +5945,8 @@ void swap(PartitionsByExprRequest &a, Pa
    swap(a.__isset, b.__isset);
  }

-const char* TableStatsResult::ascii_fingerprint = "CE3E8F0D9B310B8D33CB7A89A75F3E05";
-const uint8_t TableStatsResult::binary_fingerprint[16] = {0xCE,0x3E,0x8F,0x0D,0x9B,0x31,0x0B,0x8D,0x33,0xCB,0x7A,0x89,0xA7,0x5F,0x3E,0x05};
+const char* TableStatsResult::ascii_fingerprint = "2E398BAC949C93A194F1A19CA5074FF8";
+const uint8_t TableStatsResult::binary_fingerprint[16] = {0x2E,0x39,0x8B,0xAC,0x94,0x9C,0x93,0xA1,0x94,0xF1,0xA1,0x9C,0xA5,0x07,0x4F,0xF8};

  uint32_t TableStatsResult::read(::apache::thrift::protocol::TProtocol* iprot) {

@@ -5843,8 +6029,8 @@ void swap(TableStatsResult &a, TableStat
    swap(a.tableStats, b.tableStats);
  }

-const char* PartitionsStatsResult::ascii_fingerprint = "FF175B50C5EF6F442D3AF25B06435A39";
-const uint8_t PartitionsStatsResult::binary_fingerprint[16] = {0xFF,0x17,0x5B,0x50,0xC5,0xEF,0x6F,0x44,0x2D,0x3A,0xF2,0x5B,0x06,0x43,0x5A,0x39};
+const char* PartitionsStatsResult::ascii_fingerprint = "DD38B4AA7D02E288659AA12CBAAEF33B";
+const uint8_t PartitionsStatsResult::binary_fingerprint[16] = {0xDD,0x38,0xB4,0xAA,0x7D,0x02,0xE2,0x88,0x65,0x9A,0xA1,0x2C,0xBA,0xAE,0xF3,0x3B};

  uint32_t PartitionsStatsResult::read(::apache::thrift::protocol::TProtocol* iprot) {


Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h Tue Apr 14 20:47:29 2015
@@ -2652,21 +2652,130 @@ class DecimalColumnStatsData {

  void swap(DecimalColumnStatsData &a, DecimalColumnStatsData &b);

+
+class Date {
+ public:
+
+ static const char* ascii_fingerprint; // = "56A59CE7FFAF82BCA8A19FAACDE4FB75";
+ static const uint8_t binary_fingerprint[16]; // = {0x56,0xA5,0x9C,0xE7,0xFF,0xAF,0x82,0xBC,0xA8,0xA1,0x9F,0xAA,0xCD,0xE4,0xFB,0x75};
+
+ Date() : daysSinceEpoch(0) {
+ }
+
+ virtual ~Date() throw() {}
+
+ int64_t daysSinceEpoch;
+
+ void __set_daysSinceEpoch(const int64_t val) {
+ daysSinceEpoch = val;
+ }
+
+ bool operator == (const Date & rhs) const
+ {
+ if (!(daysSinceEpoch == rhs.daysSinceEpoch))
+ return false;
+ return true;
+ }
+ bool operator != (const Date &rhs) const {
+ return !(*this == rhs);
+ }
+
+ bool operator < (const Date & ) const;
+
+ uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+ uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+void swap(Date &a, Date &b);
+
+typedef struct _DateColumnStatsData__isset {
+ _DateColumnStatsData__isset() : lowValue(false), highValue(false) {}
+ bool lowValue;
+ bool highValue;
+} _DateColumnStatsData__isset;
+
+class DateColumnStatsData {
+ public:
+
+ static const char* ascii_fingerprint; // = "D0719F3BBA8248297BB5287552897F59";
+ static const uint8_t binary_fingerprint[16]; // = {0xD0,0x71,0x9F,0x3B,0xBA,0x82,0x48,0x29,0x7B,0xB5,0x28,0x75,0x52,0x89,0x7F,0x59};
+
+ DateColumnStatsData() : numNulls(0), numDVs(0) {
+ }
+
+ virtual ~DateColumnStatsData() throw() {}
+
+ Date lowValue;
+ Date highValue;
+ int64_t numNulls;
+ int64_t numDVs;
+
+ _DateColumnStatsData__isset __isset;
+
+ void __set_lowValue(const Date& val) {
+ lowValue = val;
+ __isset.lowValue = true;
+ }
+
+ void __set_highValue(const Date& val) {
+ highValue = val;
+ __isset.highValue = true;
+ }
+
+ void __set_numNulls(const int64_t val) {
+ numNulls = val;
+ }
+
+ void __set_numDVs(const int64_t val) {
+ numDVs = val;
+ }
+
+ bool operator == (const DateColumnStatsData & rhs) const
+ {
+ if (__isset.lowValue != rhs.__isset.lowValue)
+ return false;
+ else if (__isset.lowValue && !(lowValue == rhs.lowValue))
+ return false;
+ if (__isset.highValue != rhs.__isset.highValue)
+ return false;
+ else if (__isset.highValue && !(highValue == rhs.highValue))
+ return false;
+ if (!(numNulls == rhs.numNulls))
+ return false;
+ if (!(numDVs == rhs.numDVs))
+ return false;
+ return true;
+ }
+ bool operator != (const DateColumnStatsData &rhs) const {
+ return !(*this == rhs);
+ }
+
+ bool operator < (const DateColumnStatsData & ) const;
+
+ uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+ uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+};
+
+void swap(DateColumnStatsData &a, DateColumnStatsData &b);
+
  typedef struct _ColumnStatisticsData__isset {
- _ColumnStatisticsData__isset() : booleanStats(false), longStats(false), doubleStats(false), stringStats(false), binaryStats(false), decimalStats(false) {}
+ _ColumnStatisticsData__isset() : booleanStats(false), longStats(false), doubleStats(false), stringStats(false), binaryStats(false), decimalStats(false), dateStats(false) {}
    bool booleanStats;
    bool longStats;
    bool doubleStats;
    bool stringStats;
    bool binaryStats;
    bool decimalStats;
+ bool dateStats;
  } _ColumnStatisticsData__isset;

  class ColumnStatisticsData {
   public:

- static const char* ascii_fingerprint; // = "D079ACEA6EE0998D0A45CB65FF1EAADD";
- static const uint8_t binary_fingerprint[16]; // = {0xD0,0x79,0xAC,0xEA,0x6E,0xE0,0x99,0x8D,0x0A,0x45,0xCB,0x65,0xFF,0x1E,0xAA,0xDD};
+ static const char* ascii_fingerprint; // = "15E449CA15A23E37F2D54C31ACA52106";
+ static const uint8_t binary_fingerprint[16]; // = {0x15,0xE4,0x49,0xCA,0x15,0xA2,0x3E,0x37,0xF2,0xD5,0x4C,0x31,0xAC,0xA5,0x21,0x06};

    ColumnStatisticsData() {
    }
@@ -2679,6 +2788,7 @@ class ColumnStatisticsData {
    StringColumnStatsData stringStats;
    BinaryColumnStatsData binaryStats;
    DecimalColumnStatsData decimalStats;
+ DateColumnStatsData dateStats;

    _ColumnStatisticsData__isset __isset;

@@ -2706,6 +2816,10 @@ class ColumnStatisticsData {
      decimalStats = val;
    }

+ void __set_dateStats(const DateColumnStatsData& val) {
+ dateStats = val;
+ }
+
    bool operator == (const ColumnStatisticsData & rhs) const
    {
      if (!(booleanStats == rhs.booleanStats))
@@ -2720,6 +2834,8 @@ class ColumnStatisticsData {
        return false;
      if (!(decimalStats == rhs.decimalStats))
        return false;
+ if (!(dateStats == rhs.dateStats))
+ return false;
      return true;
    }
    bool operator != (const ColumnStatisticsData &rhs) const {
@@ -2739,8 +2855,8 @@ void swap(ColumnStatisticsData &a, Colum
  class ColumnStatisticsObj {
   public:

- static const char* ascii_fingerprint; // = "E49E62CFC71682004614EFEDAC3CD3F4";
- static const uint8_t binary_fingerprint[16]; // = {0xE4,0x9E,0x62,0xCF,0xC7,0x16,0x82,0x00,0x46,0x14,0xEF,0xED,0xAC,0x3C,0xD3,0xF4};
+ static const char* ascii_fingerprint; // = "A82BA819C9FFEDDEBC0D50F6F4E46534";
+ static const uint8_t binary_fingerprint[16]; // = {0xA8,0x2B,0xA8,0x19,0xC9,0xFF,0xED,0xDE,0xBC,0x0D,0x50,0xF6,0xF4,0xE4,0x65,0x34};

    ColumnStatisticsObj() : colName(), colType() {
    }
@@ -2868,8 +2984,8 @@ void swap(ColumnStatisticsDesc &a, Colum
  class ColumnStatistics {
   public:

- static const char* ascii_fingerprint; // = "6682E234199B2CD3807B1ED420C6A7F8";
- static const uint8_t binary_fingerprint[16]; // = {0x66,0x82,0xE2,0x34,0x19,0x9B,0x2C,0xD3,0x80,0x7B,0x1E,0xD4,0x20,0xC6,0xA7,0xF8};
+ static const char* ascii_fingerprint; // = "8A64D0A67FFD3A372726A320B3913D5A";
+ static const uint8_t binary_fingerprint[16]; // = {0x8A,0x64,0xD0,0xA6,0x7F,0xFD,0x3A,0x37,0x27,0x26,0xA3,0x20,0xB3,0x91,0x3D,0x5A};

    ColumnStatistics() {
    }
@@ -2912,8 +3028,8 @@ void swap(ColumnStatistics &a, ColumnSta
  class AggrStats {
   public:

- static const char* ascii_fingerprint; // = "399BDBAF7503E0BFB5E1D99C83D790CD";
- static const uint8_t binary_fingerprint[16]; // = {0x39,0x9B,0xDB,0xAF,0x75,0x03,0xE0,0xBF,0xB5,0xE1,0xD9,0x9C,0x83,0xD7,0x90,0xCD};
+ static const char* ascii_fingerprint; // = "1FC765A3AB2954591C14B0D946778213";
+ static const uint8_t binary_fingerprint[16]; // = {0x1F,0xC7,0x65,0xA3,0xAB,0x29,0x54,0x59,0x1C,0x14,0xB0,0xD9,0x46,0x77,0x82,0x13};

    AggrStats() : partsFound(0) {
    }
@@ -2956,8 +3072,8 @@ void swap(AggrStats &a, AggrStats &b);
  class SetPartitionsStatsRequest {
   public:

- static const char* ascii_fingerprint; // = "635C0DA9A947DA57AAE693A5DFB86569";
- static const uint8_t binary_fingerprint[16]; // = {0x63,0x5C,0x0D,0xA9,0xA9,0x47,0xDA,0x57,0xAA,0xE6,0x93,0xA5,0xDF,0xB8,0x65,0x69};
+ static const char* ascii_fingerprint; // = "8BCBF2CE9E942D0FB7D8F4ED57D8DC27";
+ static const uint8_t binary_fingerprint[16]; // = {0x8B,0xCB,0xF2,0xCE,0x9E,0x94,0x2D,0x0F,0xB7,0xD8,0xF4,0xED,0x57,0xD8,0xDC,0x27};

    SetPartitionsStatsRequest() {
    }
@@ -3209,8 +3325,8 @@ void swap(PartitionsByExprRequest &a, Pa
  class TableStatsResult {
   public:

- static const char* ascii_fingerprint; // = "CE3E8F0D9B310B8D33CB7A89A75F3E05";
- static const uint8_t binary_fingerprint[16]; // = {0xCE,0x3E,0x8F,0x0D,0x9B,0x31,0x0B,0x8D,0x33,0xCB,0x7A,0x89,0xA7,0x5F,0x3E,0x05};
+ static const char* ascii_fingerprint; // = "2E398BAC949C93A194F1A19CA5074FF8";
+ static const uint8_t binary_fingerprint[16]; // = {0x2E,0x39,0x8B,0xAC,0x94,0x9C,0x93,0xA1,0x94,0xF1,0xA1,0x9C,0xA5,0x07,0x4F,0xF8};

    TableStatsResult() {
    }
@@ -3246,8 +3362,8 @@ void swap(TableStatsResult &a, TableStat
  class PartitionsStatsResult {
   public:

- static const char* ascii_fingerprint; // = "FF175B50C5EF6F442D3AF25B06435A39";
- static const uint8_t binary_fingerprint[16]; // = {0xFF,0x17,0x5B,0x50,0xC5,0xEF,0x6F,0x44,0x2D,0x3A,0xF2,0x5B,0x06,0x43,0x5A,0x39};
+ static const char* ascii_fingerprint; // = "DD38B4AA7D02E288659AA12CBAAEF33B";
+ static const uint8_t binary_fingerprint[16]; // = {0xDD,0x38,0xB4,0xAA,0x7D,0x02,0xE2,0x88,0x65,0x9A,0xA1,0x2C,0xBA,0xAE,0xF3,0x3B};

    PartitionsStatsResult() {
    }

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddDynamicPartitions.java Tue Apr 14 20:47:29 2015
@@ -633,7 +633,7 @@ public class AddDynamicPartitions implem
                  struct.partitionnames = new ArrayList<String>(_list492.size);
                  for (int _i493 = 0; _i493 < _list492.size; ++_i493)
                  {
- String _elem494; // required
+ String _elem494; // optional
                    _elem494 = iprot.readString();
                    struct.partitionnames.add(_elem494);
                  }
@@ -725,7 +725,7 @@ public class AddDynamicPartitions implem
          struct.partitionnames = new ArrayList<String>(_list497.size);
          for (int _i498 = 0; _i498 < _list497.size; ++_i498)
          {
- String _elem499; // required
+ String _elem499; // optional
            _elem499 = iprot.readString();
            struct.partitionnames.add(_elem499);
          }

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsRequest.java Tue Apr 14 20:47:29 2015
@@ -704,7 +704,7 @@ public class AddPartitionsRequest implem
                  struct.parts = new ArrayList<Partition>(_list388.size);
                  for (int _i389 = 0; _i389 < _list388.size; ++_i389)
                  {
- Partition _elem390; // required
+ Partition _elem390; // optional
                    _elem390 = new Partition();
                    _elem390.read(iprot);
                    struct.parts.add(_elem390);
@@ -824,7 +824,7 @@ public class AddPartitionsRequest implem
          struct.parts = new ArrayList<Partition>(_list393.size);
          for (int _i394 = 0; _i394 < _list393.size; ++_i394)
          {
- Partition _elem395; // required
+ Partition _elem395; // optional
            _elem395 = new Partition();
            _elem395.read(iprot);
            struct.parts.add(_elem395);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AddPartitionsResult.java Tue Apr 14 20:47:29 2015
@@ -346,7 +346,7 @@ public class AddPartitionsResult impleme
                  struct.partitions = new ArrayList<Partition>(_list380.size);
                  for (int _i381 = 0; _i381 < _list380.size; ++_i381)
                  {
- Partition _elem382; // required
+ Partition _elem382; // optional
                    _elem382 = new Partition();
                    _elem382.read(iprot);
                    struct.partitions.add(_elem382);
@@ -428,7 +428,7 @@ public class AddPartitionsResult impleme
            struct.partitions = new ArrayList<Partition>(_list385.size);
            for (int _i386 = 0; _i386 < _list385.size; ++_i386)
            {
- Partition _elem387; // required
+ Partition _elem387; // optional
              _elem387 = new Partition();
              _elem387.read(iprot);
              struct.partitions.add(_elem387);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AggrStats.java Tue Apr 14 20:47:29 2015
@@ -439,7 +439,7 @@ public class AggrStats implements org.ap
                  struct.colStats = new ArrayList<ColumnStatisticsObj>(_list278.size);
                  for (int _i279 = 0; _i279 < _list278.size; ++_i279)
                  {
- ColumnStatisticsObj _elem280; // required
+ ColumnStatisticsObj _elem280; // optional
                    _elem280 = new ColumnStatisticsObj();
                    _elem280.read(iprot);
                    struct.colStats.add(_elem280);
@@ -522,7 +522,7 @@ public class AggrStats implements org.ap
          struct.colStats = new ArrayList<ColumnStatisticsObj>(_list283.size);
          for (int _i284 = 0; _i284 < _list283.size; ++_i284)
          {
- ColumnStatisticsObj _elem285; // required
+ ColumnStatisticsObj _elem285; // optional
            _elem285 = new ColumnStatisticsObj();
            _elem285.read(iprot);
            struct.colStats.add(_elem285);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatistics.java Tue Apr 14 20:47:29 2015
@@ -451,7 +451,7 @@ public class ColumnStatistics implements
                  struct.statsObj = new ArrayList<ColumnStatisticsObj>(_list270.size);
                  for (int _i271 = 0; _i271 < _list270.size; ++_i271)
                  {
- ColumnStatisticsObj _elem272; // required
+ ColumnStatisticsObj _elem272; // optional
                    _elem272 = new ColumnStatisticsObj();
                    _elem272.read(iprot);
                    struct.statsObj.add(_elem272);
@@ -531,7 +531,7 @@ public class ColumnStatistics implements
          struct.statsObj = new ArrayList<ColumnStatisticsObj>(_list275.size);
          for (int _i276 = 0; _i276 < _list275.size; ++_i276)
          {
- ColumnStatisticsObj _elem277; // required
+ ColumnStatisticsObj _elem277; // optional
            _elem277 = new ColumnStatisticsObj();
            _elem277.read(iprot);
            struct.statsObj.add(_elem277);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ColumnStatisticsData.java Tue Apr 14 20:47:29 2015
@@ -39,6 +39,7 @@ public class ColumnStatisticsData extend
    private static final org.apache.thrift.protocol.TField STRING_STATS_FIELD_DESC = new org.apache.thrift.protocol.TField("stringStats", org.apache.thrift.protocol.TType.STRUCT, (short)4);
    private static final org.apache.thrift.protocol.TField BINARY_STATS_FIELD_DESC = new org.apache.thrift.protocol.TField("binaryStats", org.apache.thrift.protocol.TType.STRUCT, (short)5);
    private static final org.apache.thrift.protocol.TField DECIMAL_STATS_FIELD_DESC = new org.apache.thrift.protocol.TField("decimalStats", org.apache.thrift.protocol.TType.STRUCT, (short)6);
+ private static final org.apache.thrift.protocol.TField DATE_STATS_FIELD_DESC = new org.apache.thrift.protocol.TField("dateStats", org.apache.thrift.protocol.TType.STRUCT, (short)7);

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
@@ -47,7 +48,8 @@ public class ColumnStatisticsData extend
      DOUBLE_STATS((short)3, "doubleStats"),
      STRING_STATS((short)4, "stringStats"),
      BINARY_STATS((short)5, "binaryStats"),
- DECIMAL_STATS((short)6, "decimalStats");
+ DECIMAL_STATS((short)6, "decimalStats"),
+ DATE_STATS((short)7, "dateStats");

      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

@@ -74,6 +76,8 @@ public class ColumnStatisticsData extend
            return BINARY_STATS;
          case 6: // DECIMAL_STATS
            return DECIMAL_STATS;
+ case 7: // DATE_STATS
+ return DATE_STATS;
          default:
            return null;
        }
@@ -128,6 +132,8 @@ public class ColumnStatisticsData extend
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, BinaryColumnStatsData.class)));
      tmpMap.put(_Fields.DECIMAL_STATS, new org.apache.thrift.meta_data.FieldMetaData("decimalStats", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DecimalColumnStatsData.class)));
+ tmpMap.put(_Fields.DATE_STATS, new org.apache.thrift.meta_data.FieldMetaData("dateStats", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DateColumnStatsData.class)));
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ColumnStatisticsData.class, metaDataMap);
    }
@@ -183,6 +189,12 @@ public class ColumnStatisticsData extend
      return x;
    }

+ public static ColumnStatisticsData dateStats(DateColumnStatsData value) {
+ ColumnStatisticsData x = new ColumnStatisticsData();
+ x.setDateStats(value);
+ return x;
+ }
+

    @Override
    protected void checkType(_Fields setField, Object value) throws ClassCastException {
@@ -217,6 +229,11 @@ public class ColumnStatisticsData extend
            break;
          }
          throw new ClassCastException("Was expecting value of type DecimalColumnStatsData for field 'decimalStats', but got " + value.getClass().getSimpleName());
+ case DATE_STATS:
+ if (value instanceof DateColumnStatsData) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type DateColumnStatsData for field 'dateStats', but got " + value.getClass().getSimpleName());
        default:
          throw new IllegalArgumentException("Unknown field id " + setField);
      }
@@ -287,6 +304,16 @@ public class ColumnStatisticsData extend
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
              return null;
            }
+ case DATE_STATS:
+ if (field.type == DATE_STATS_FIELD_DESC.type) {
+ DateColumnStatsData dateStats;
+ dateStats = new DateColumnStatsData();
+ dateStats.read(iprot);
+ return dateStats;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
          default:
            throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
        }
@@ -322,6 +349,10 @@ public class ColumnStatisticsData extend
          DecimalColumnStatsData decimalStats = (DecimalColumnStatsData)value_;
          decimalStats.write(oprot);
          return;
+ case DATE_STATS:
+ DateColumnStatsData dateStats = (DateColumnStatsData)value_;
+ dateStats.write(oprot);
+ return;
        default:
          throw new IllegalStateException("Cannot write union with unknown field " + setField_);
      }
@@ -362,6 +393,11 @@ public class ColumnStatisticsData extend
            decimalStats = new DecimalColumnStatsData();
            decimalStats.read(iprot);
            return decimalStats;
+ case DATE_STATS:
+ DateColumnStatsData dateStats;
+ dateStats = new DateColumnStatsData();
+ dateStats.read(iprot);
+ return dateStats;
          default:
            throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
        }
@@ -397,6 +433,10 @@ public class ColumnStatisticsData extend
          DecimalColumnStatsData decimalStats = (DecimalColumnStatsData)value_;
          decimalStats.write(oprot);
          return;
+ case DATE_STATS:
+ DateColumnStatsData dateStats = (DateColumnStatsData)value_;
+ dateStats.write(oprot);
+ return;
        default:
          throw new IllegalStateException("Cannot write union with unknown field " + setField_);
      }
@@ -417,6 +457,8 @@ public class ColumnStatisticsData extend
          return BINARY_STATS_FIELD_DESC;
        case DECIMAL_STATS:
          return DECIMAL_STATS_FIELD_DESC;
+ case DATE_STATS:
+ return DATE_STATS_FIELD_DESC;
        default:
          throw new IllegalArgumentException("Unknown field id " + setField);
      }
@@ -521,6 +563,20 @@ public class ColumnStatisticsData extend
      value_ = value;
    }

+ public DateColumnStatsData getDateStats() {
+ if (getSetField() == _Fields.DATE_STATS) {
+ return (DateColumnStatsData)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'dateStats' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void setDateStats(DateColumnStatsData value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.DATE_STATS;
+ value_ = value;
+ }
+
    public boolean isSetBooleanStats() {
      return setField_ == _Fields.BOOLEAN_STATS;
    }
@@ -551,6 +607,11 @@ public class ColumnStatisticsData extend
    }


+ public boolean isSetDateStats() {
+ return setField_ == _Fields.DATE_STATS;
+ }
+
+
    public boolean equals(Object other) {
      if (other instanceof ColumnStatisticsData) {
        return equals((ColumnStatisticsData)other);

Added: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java?rev=1673553&view=auto
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java (added)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Date.java Tue Apr 14 20:47:29 2015
@@ -0,0 +1,383 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class Date implements org.apache.thrift.TBase<Date, Date._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Date");
+
+ private static final org.apache.thrift.protocol.TField DAYS_SINCE_EPOCH_FIELD_DESC = new org.apache.thrift.protocol.TField("daysSinceEpoch", org.apache.thrift.protocol.TType.I64, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new DateStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new DateTupleSchemeFactory());
+ }
+
+ private long daysSinceEpoch; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ DAYS_SINCE_EPOCH((short)1, "daysSinceEpoch");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // DAYS_SINCE_EPOCH
+ return DAYS_SINCE_EPOCH;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __DAYSSINCEEPOCH_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.DAYS_SINCE_EPOCH, new org.apache.thrift.meta_data.FieldMetaData("daysSinceEpoch", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Date.class, metaDataMap);
+ }
+
+ public Date() {
+ }
+
+ public Date(
+ long daysSinceEpoch)
+ {
+ this();
+ this.daysSinceEpoch = daysSinceEpoch;
+ setDaysSinceEpochIsSet(true);
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public Date(Date other) {
+ __isset_bitfield = other.__isset_bitfield;
+ this.daysSinceEpoch = other.daysSinceEpoch;
+ }
+
+ public Date deepCopy() {
+ return new Date(this);
+ }
+
+ @Override
+ public void clear() {
+ setDaysSinceEpochIsSet(false);
+ this.daysSinceEpoch = 0;
+ }
+
+ public long getDaysSinceEpoch() {
+ return this.daysSinceEpoch;
+ }
+
+ public void setDaysSinceEpoch(long daysSinceEpoch) {
+ this.daysSinceEpoch = daysSinceEpoch;
+ setDaysSinceEpochIsSet(true);
+ }
+
+ public void unsetDaysSinceEpoch() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DAYSSINCEEPOCH_ISSET_ID);
+ }
+
+ /** Returns true if field daysSinceEpoch is set (has been assigned a value) and false otherwise */
+ public boolean isSetDaysSinceEpoch() {
+ return EncodingUtils.testBit(__isset_bitfield, __DAYSSINCEEPOCH_ISSET_ID);
+ }
+
+ public void setDaysSinceEpochIsSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DAYSSINCEEPOCH_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case DAYS_SINCE_EPOCH:
+ if (value == null) {
+ unsetDaysSinceEpoch();
+ } else {
+ setDaysSinceEpoch((Long)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case DAYS_SINCE_EPOCH:
+ return Long.valueOf(getDaysSinceEpoch());
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case DAYS_SINCE_EPOCH:
+ return isSetDaysSinceEpoch();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof Date)
+ return this.equals((Date)that);
+ return false;
+ }
+
+ public boolean equals(Date that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_daysSinceEpoch = true;
+ boolean that_present_daysSinceEpoch = true;
+ if (this_present_daysSinceEpoch || that_present_daysSinceEpoch) {
+ if (!(this_present_daysSinceEpoch && that_present_daysSinceEpoch))
+ return false;
+ if (this.daysSinceEpoch != that.daysSinceEpoch)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ HashCodeBuilder builder = new HashCodeBuilder();
+
+ boolean present_daysSinceEpoch = true;
+ builder.append(present_daysSinceEpoch);
+ if (present_daysSinceEpoch)
+ builder.append(daysSinceEpoch);
+
+ return builder.toHashCode();
+ }
+
+ public int compareTo(Date other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+ Date typedOther = (Date)other;
+
+ lastComparison = Boolean.valueOf(isSetDaysSinceEpoch()).compareTo(typedOther.isSetDaysSinceEpoch());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetDaysSinceEpoch()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.daysSinceEpoch, typedOther.daysSinceEpoch);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("Date(");
+ boolean first = true;
+
+ sb.append("daysSinceEpoch:");
+ sb.append(this.daysSinceEpoch);
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!isSetDaysSinceEpoch()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'daysSinceEpoch' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class DateStandardSchemeFactory implements SchemeFactory {
+ public DateStandardScheme getScheme() {
+ return new DateStandardScheme();
+ }
+ }
+
+ private static class DateStandardScheme extends StandardScheme<Date> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, Date struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // DAYS_SINCE_EPOCH
+ if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+ struct.daysSinceEpoch = iprot.readI64();
+ struct.setDaysSinceEpochIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, Date struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ oprot.writeFieldBegin(DAYS_SINCE_EPOCH_FIELD_DESC);
+ oprot.writeI64(struct.daysSinceEpoch);
+ oprot.writeFieldEnd();
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class DateTupleSchemeFactory implements SchemeFactory {
+ public DateTupleScheme getScheme() {
+ return new DateTupleScheme();
+ }
+ }
+
+ private static class DateTupleScheme extends TupleScheme<Date> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, Date struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeI64(struct.daysSinceEpoch);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, Date struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.daysSinceEpoch = iprot.readI64();
+ struct.setDaysSinceEpochIsSet(true);
+ }
+ }
+
+}
+

Added: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java?rev=1673553&view=auto
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java (added)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java Tue Apr 14 20:47:29 2015
@@ -0,0 +1,704 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.0)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DateColumnStatsData implements org.apache.thrift.TBase<DateColumnStatsData, DateColumnStatsData._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DateColumnStatsData");
+
+ private static final org.apache.thrift.protocol.TField LOW_VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("lowValue", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+ private static final org.apache.thrift.protocol.TField HIGH_VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("highValue", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+ private static final org.apache.thrift.protocol.TField NUM_NULLS_FIELD_DESC = new org.apache.thrift.protocol.TField("numNulls", org.apache.thrift.protocol.TType.I64, (short)3);
+ private static final org.apache.thrift.protocol.TField NUM_DVS_FIELD_DESC = new org.apache.thrift.protocol.TField("numDVs", org.apache.thrift.protocol.TType.I64, (short)4);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new DateColumnStatsDataStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new DateColumnStatsDataTupleSchemeFactory());
+ }
+
+ private Date lowValue; // optional
+ private Date highValue; // optional
+ private long numNulls; // required
+ private long numDVs; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ LOW_VALUE((short)1, "lowValue"),
+ HIGH_VALUE((short)2, "highValue"),
+ NUM_NULLS((short)3, "numNulls"),
+ NUM_DVS((short)4, "numDVs");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // LOW_VALUE
+ return LOW_VALUE;
+ case 2: // HIGH_VALUE
+ return HIGH_VALUE;
+ case 3: // NUM_NULLS
+ return NUM_NULLS;
+ case 4: // NUM_DVS
+ return NUM_DVS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __NUMNULLS_ISSET_ID = 0;
+ private static final int __NUMDVS_ISSET_ID = 1;
+ private byte __isset_bitfield = 0;
+ private _Fields optionals[] = {_Fields.LOW_VALUE,_Fields.HIGH_VALUE};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.LOW_VALUE, new org.apache.thrift.meta_data.FieldMetaData("lowValue", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Date.class)));
+ tmpMap.put(_Fields.HIGH_VALUE, new org.apache.thrift.meta_data.FieldMetaData("highValue", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Date.class)));
+ tmpMap.put(_Fields.NUM_NULLS, new org.apache.thrift.meta_data.FieldMetaData("numNulls", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+ tmpMap.put(_Fields.NUM_DVS, new org.apache.thrift.meta_data.FieldMetaData("numDVs", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DateColumnStatsData.class, metaDataMap);
+ }
+
+ public DateColumnStatsData() {
+ }
+
+ public DateColumnStatsData(
+ long numNulls,
+ long numDVs)
+ {
+ this();
+ this.numNulls = numNulls;
+ setNumNullsIsSet(true);
+ this.numDVs = numDVs;
+ setNumDVsIsSet(true);
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public DateColumnStatsData(DateColumnStatsData other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.isSetLowValue()) {
+ this.lowValue = new Date(other.lowValue);
+ }
+ if (other.isSetHighValue()) {
+ this.highValue = new Date(other.highValue);
+ }
+ this.numNulls = other.numNulls;
+ this.numDVs = other.numDVs;
+ }
+
+ public DateColumnStatsData deepCopy() {
+ return new DateColumnStatsData(this);
+ }
+
+ @Override
+ public void clear() {
+ this.lowValue = null;
+ this.highValue = null;
+ setNumNullsIsSet(false);
+ this.numNulls = 0;
+ setNumDVsIsSet(false);
+ this.numDVs = 0;
+ }
+
+ public Date getLowValue() {
+ return this.lowValue;
+ }
+
+ public void setLowValue(Date lowValue) {
+ this.lowValue = lowValue;
+ }
+
+ public void unsetLowValue() {
+ this.lowValue = null;
+ }
+
+ /** Returns true if field lowValue is set (has been assigned a value) and false otherwise */
+ public boolean isSetLowValue() {
+ return this.lowValue != null;
+ }
+
+ public void setLowValueIsSet(boolean value) {
+ if (!value) {
+ this.lowValue = null;
+ }
+ }
+
+ public Date getHighValue() {
+ return this.highValue;
+ }
+
+ public void setHighValue(Date highValue) {
+ this.highValue = highValue;
+ }
+
+ public void unsetHighValue() {
+ this.highValue = null;
+ }
+
+ /** Returns true if field highValue is set (has been assigned a value) and false otherwise */
+ public boolean isSetHighValue() {
+ return this.highValue != null;
+ }
+
+ public void setHighValueIsSet(boolean value) {
+ if (!value) {
+ this.highValue = null;
+ }
+ }
+
+ public long getNumNulls() {
+ return this.numNulls;
+ }
+
+ public void setNumNulls(long numNulls) {
+ this.numNulls = numNulls;
+ setNumNullsIsSet(true);
+ }
+
+ public void unsetNumNulls() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NUMNULLS_ISSET_ID);
+ }
+
+ /** Returns true if field numNulls is set (has been assigned a value) and false otherwise */
+ public boolean isSetNumNulls() {
+ return EncodingUtils.testBit(__isset_bitfield, __NUMNULLS_ISSET_ID);
+ }
+
+ public void setNumNullsIsSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NUMNULLS_ISSET_ID, value);
+ }
+
+ public long getNumDVs() {
+ return this.numDVs;
+ }
+
+ public void setNumDVs(long numDVs) {
+ this.numDVs = numDVs;
+ setNumDVsIsSet(true);
+ }
+
+ public void unsetNumDVs() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NUMDVS_ISSET_ID);
+ }
+
+ /** Returns true if field numDVs is set (has been assigned a value) and false otherwise */
+ public boolean isSetNumDVs() {
+ return EncodingUtils.testBit(__isset_bitfield, __NUMDVS_ISSET_ID);
+ }
+
+ public void setNumDVsIsSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NUMDVS_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case LOW_VALUE:
+ if (value == null) {
+ unsetLowValue();
+ } else {
+ setLowValue((Date)value);
+ }
+ break;
+
+ case HIGH_VALUE:
+ if (value == null) {
+ unsetHighValue();
+ } else {
+ setHighValue((Date)value);
+ }
+ break;
+
+ case NUM_NULLS:
+ if (value == null) {
+ unsetNumNulls();
+ } else {
+ setNumNulls((Long)value);
+ }
+ break;
+
+ case NUM_DVS:
+ if (value == null) {
+ unsetNumDVs();
+ } else {
+ setNumDVs((Long)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case LOW_VALUE:
+ return getLowValue();
+
+ case HIGH_VALUE:
+ return getHighValue();
+
+ case NUM_NULLS:
+ return Long.valueOf(getNumNulls());
+
+ case NUM_DVS:
+ return Long.valueOf(getNumDVs());
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case LOW_VALUE:
+ return isSetLowValue();
+ case HIGH_VALUE:
+ return isSetHighValue();
+ case NUM_NULLS:
+ return isSetNumNulls();
+ case NUM_DVS:
+ return isSetNumDVs();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof DateColumnStatsData)
+ return this.equals((DateColumnStatsData)that);
+ return false;
+ }
+
+ public boolean equals(DateColumnStatsData that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_lowValue = true && this.isSetLowValue();
+ boolean that_present_lowValue = true && that.isSetLowValue();
+ if (this_present_lowValue || that_present_lowValue) {
+ if (!(this_present_lowValue && that_present_lowValue))
+ return false;
+ if (!this.lowValue.equals(that.lowValue))
+ return false;
+ }
+
+ boolean this_present_highValue = true && this.isSetHighValue();
+ boolean that_present_highValue = true && that.isSetHighValue();
+ if (this_present_highValue || that_present_highValue) {
+ if (!(this_present_highValue && that_present_highValue))
+ return false;
+ if (!this.highValue.equals(that.highValue))
+ return false;
+ }
+
+ boolean this_present_numNulls = true;
+ boolean that_present_numNulls = true;
+ if (this_present_numNulls || that_present_numNulls) {
+ if (!(this_present_numNulls && that_present_numNulls))
+ return false;
+ if (this.numNulls != that.numNulls)
+ return false;
+ }
+
+ boolean this_present_numDVs = true;
+ boolean that_present_numDVs = true;
+ if (this_present_numDVs || that_present_numDVs) {
+ if (!(this_present_numDVs && that_present_numDVs))
+ return false;
+ if (this.numDVs != that.numDVs)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ HashCodeBuilder builder = new HashCodeBuilder();
+
+ boolean present_lowValue = true && (isSetLowValue());
+ builder.append(present_lowValue);
+ if (present_lowValue)
+ builder.append(lowValue);
+
+ boolean present_highValue = true && (isSetHighValue());
+ builder.append(present_highValue);
+ if (present_highValue)
+ builder.append(highValue);
+
+ boolean present_numNulls = true;
+ builder.append(present_numNulls);
+ if (present_numNulls)
+ builder.append(numNulls);
+
+ boolean present_numDVs = true;
+ builder.append(present_numDVs);
+ if (present_numDVs)
+ builder.append(numDVs);
+
+ return builder.toHashCode();
+ }
+
+ public int compareTo(DateColumnStatsData other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+ DateColumnStatsData typedOther = (DateColumnStatsData)other;
+
+ lastComparison = Boolean.valueOf(isSetLowValue()).compareTo(typedOther.isSetLowValue());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetLowValue()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.lowValue, typedOther.lowValue);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetHighValue()).compareTo(typedOther.isSetHighValue());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetHighValue()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.highValue, typedOther.highValue);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetNumNulls()).compareTo(typedOther.isSetNumNulls());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetNumNulls()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.numNulls, typedOther.numNulls);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetNumDVs()).compareTo(typedOther.isSetNumDVs());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetNumDVs()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.numDVs, typedOther.numDVs);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("DateColumnStatsData(");
+ boolean first = true;
+
+ if (isSetLowValue()) {
+ sb.append("lowValue:");
+ if (this.lowValue == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.lowValue);
+ }
+ first = false;
+ }
+ if (isSetHighValue()) {
+ if (!first) sb.append(", ");
+ sb.append("highValue:");
+ if (this.highValue == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.highValue);
+ }
+ first = false;
+ }
+ if (!first) sb.append(", ");
+ sb.append("numNulls:");
+ sb.append(this.numNulls);
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("numDVs:");
+ sb.append(this.numDVs);
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!isSetNumNulls()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'numNulls' is unset! Struct:" + toString());
+ }
+
+ if (!isSetNumDVs()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'numDVs' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ if (lowValue != null) {
+ lowValue.validate();
+ }
+ if (highValue != null) {
+ highValue.validate();
+ }
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class DateColumnStatsDataStandardSchemeFactory implements SchemeFactory {
+ public DateColumnStatsDataStandardScheme getScheme() {
+ return new DateColumnStatsDataStandardScheme();
+ }
+ }
+
+ private static class DateColumnStatsDataStandardScheme extends StandardScheme<DateColumnStatsData> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, DateColumnStatsData struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // LOW_VALUE
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.lowValue = new Date();
+ struct.lowValue.read(iprot);
+ struct.setLowValueIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // HIGH_VALUE
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.highValue = new Date();
+ struct.highValue.read(iprot);
+ struct.setHighValueIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // NUM_NULLS
+ if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+ struct.numNulls = iprot.readI64();
+ struct.setNumNullsIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 4: // NUM_DVS
+ if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+ struct.numDVs = iprot.readI64();
+ struct.setNumDVsIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, DateColumnStatsData struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.lowValue != null) {
+ if (struct.isSetLowValue()) {
+ oprot.writeFieldBegin(LOW_VALUE_FIELD_DESC);
+ struct.lowValue.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ }
+ if (struct.highValue != null) {
+ if (struct.isSetHighValue()) {
+ oprot.writeFieldBegin(HIGH_VALUE_FIELD_DESC);
+ struct.highValue.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ }
+ oprot.writeFieldBegin(NUM_NULLS_FIELD_DESC);
+ oprot.writeI64(struct.numNulls);
+ oprot.writeFieldEnd();
+ oprot.writeFieldBegin(NUM_DVS_FIELD_DESC);
+ oprot.writeI64(struct.numDVs);
+ oprot.writeFieldEnd();
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class DateColumnStatsDataTupleSchemeFactory implements SchemeFactory {
+ public DateColumnStatsDataTupleScheme getScheme() {
+ return new DateColumnStatsDataTupleScheme();
+ }
+ }
+
+ private static class DateColumnStatsDataTupleScheme extends TupleScheme<DateColumnStatsData> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, DateColumnStatsData struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeI64(struct.numNulls);
+ oprot.writeI64(struct.numDVs);
+ BitSet optionals = new BitSet();
+ if (struct.isSetLowValue()) {
+ optionals.set(0);
+ }
+ if (struct.isSetHighValue()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.isSetLowValue()) {
+ struct.lowValue.write(oprot);
+ }
+ if (struct.isSetHighValue()) {
+ struct.highValue.write(oprot);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, DateColumnStatsData struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.numNulls = iprot.readI64();
+ struct.setNumNullsIsSet(true);
+ struct.numDVs = iprot.readI64();
+ struct.setNumDVsIsSet(true);
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ struct.lowValue = new Date();
+ struct.lowValue.read(iprot);
+ struct.setLowValueIsSet(true);
+ }
+ if (incoming.get(1)) {
+ struct.highValue = new Date();
+ struct.highValue.read(iprot);
+ struct.setHighValueIsSet(true);
+ }
+ }
+ }
+
+}
+

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/DropPartitionsResult.java Tue Apr 14 20:47:29 2015
@@ -346,7 +346,7 @@ public class DropPartitionsResult implem
                  struct.partitions = new ArrayList<Partition>(_list396.size);
                  for (int _i397 = 0; _i397 < _list396.size; ++_i397)
                  {
- Partition _elem398; // required
+ Partition _elem398; // optional
                    _elem398 = new Partition();
                    _elem398.read(iprot);
                    struct.partitions.add(_elem398);
@@ -428,7 +428,7 @@ public class DropPartitionsResult implem
            struct.partitions = new ArrayList<Partition>(_list401.size);
            for (int _i402 = 0; _i402 < _list401.size; ++_i402)
            {
- Partition _elem403; // required
+ Partition _elem403; // optional
              _elem403 = new Partition();
              _elem403.read(iprot);
              struct.partitions.add(_elem403);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java Tue Apr 14 20:47:29 2015
@@ -135,9 +135,9 @@ public class FieldSchema implements org.
      String comment)
    {
      this();
- this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
- this.type = org.apache.hive.common.util.HiveStringUtils.intern(type);
- this.comment = org.apache.hive.common.util.HiveStringUtils.intern(comment);
+ this.name = name;
+ this.type = type;
+ this.comment = comment;
    }

    /**
@@ -145,13 +145,13 @@ public class FieldSchema implements org.
     */
    public FieldSchema(FieldSchema other) {
      if (other.isSetName()) {
- this.name = org.apache.hive.common.util.HiveStringUtils.intern(other.name);
+ this.name = other.name;
      }
      if (other.isSetType()) {
- this.type = org.apache.hive.common.util.HiveStringUtils.intern(other.type);
+ this.type = other.type;
      }
      if (other.isSetComment()) {
- this.comment = org.apache.hive.common.util.HiveStringUtils.intern(other.comment);
+ this.comment = other.comment;
      }
    }

@@ -171,7 +171,7 @@ public class FieldSchema implements org.
    }

    public void setName(String name) {
- this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
+ this.name = name;
    }

    public void unsetName() {
@@ -194,7 +194,7 @@ public class FieldSchema implements org.
    }

    public void setType(String type) {
- this.type = org.apache.hive.common.util.HiveStringUtils.intern(type);
+ this.type = type;
    }

    public void unsetType() {
@@ -217,7 +217,7 @@ public class FieldSchema implements org.
    }

    public void setComment(String comment) {
- this.comment = org.apache.hive.common.util.HiveStringUtils.intern(comment);
+ this.comment = comment;
    }

    public void unsetComment() {

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FireEventRequest.java Tue Apr 14 20:47:29 2015
@@ -716,7 +716,7 @@ public class FireEventRequest implements
                  struct.partitionVals = new ArrayList<String>(_list516.size);
                  for (int _i517 = 0; _i517 < _list516.size; ++_i517)
                  {
- String _elem518; // required
+ String _elem518; // optional
                    _elem518 = iprot.readString();
                    struct.partitionVals.add(_elem518);
                  }
@@ -846,7 +846,7 @@ public class FireEventRequest implements
            struct.partitionVals = new ArrayList<String>(_list521.size);
            for (int _i522 = 0; _i522 < _list521.size; ++_i522)
            {
- String _elem523; // required
+ String _elem523; // optional
              _elem523 = iprot.readString();
              struct.partitionVals.add(_elem523);
            }

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Function.java Tue Apr 14 20:47:29 2015
@@ -997,7 +997,7 @@ public class Function implements org.apa
                  struct.resourceUris = new ArrayList<ResourceUri>(_list420.size);
                  for (int _i421 = 0; _i421 < _list420.size; ++_i421)
                  {
- ResourceUri _elem422; // required
+ ResourceUri _elem422; // optional
                    _elem422 = new ResourceUri();
                    _elem422.read(iprot);
                    struct.resourceUris.add(_elem422);
@@ -1180,7 +1180,7 @@ public class Function implements org.apa
            struct.resourceUris = new ArrayList<ResourceUri>(_list425.size);
            for (int _i426 = 0; _i426 < _list425.size; ++_i426)
            {
- ResourceUri _elem427; // required
+ ResourceUri _elem427; // optional
              _elem427 = new ResourceUri();
              _elem427.read(iprot);
              struct.resourceUris.add(_elem427);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsInfoResponse.java Tue Apr 14 20:47:29 2015
@@ -447,7 +447,7 @@ public class GetOpenTxnsInfoResponse imp
                  struct.open_txns = new ArrayList<TxnInfo>(_list428.size);
                  for (int _i429 = 0; _i429 < _list428.size; ++_i429)
                  {
- TxnInfo _elem430; // required
+ TxnInfo _elem430; // optional
                    _elem430 = new TxnInfo();
                    _elem430.read(iprot);
                    struct.open_txns.add(_elem430);
@@ -524,7 +524,7 @@ public class GetOpenTxnsInfoResponse imp
          struct.open_txns = new ArrayList<TxnInfo>(_list433.size);
          for (int _i434 = 0; _i434 < _list433.size; ++_i434)
          {
- TxnInfo _elem435; // required
+ TxnInfo _elem435; // optional
            _elem435 = new TxnInfo();
            _elem435.read(iprot);
            struct.open_txns.add(_elem435);

Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetOpenTxnsResponse.java Tue Apr 14 20:47:29 2015
@@ -447,7 +447,7 @@ public class GetOpenTxnsResponse impleme
                  struct.open_txns = new HashSet<Long>(2*_set436.size);
                  for (int _i437 = 0; _i437 < _set436.size; ++_i437)
                  {
- long _elem438; // required
+ long _elem438; // optional
                    _elem438 = iprot.readI64();
                    struct.open_txns.add(_elem438);
                  }
@@ -523,7 +523,7 @@ public class GetOpenTxnsResponse impleme
          struct.open_txns = new HashSet<Long>(2*_set441.size);
          for (int _i442 = 0; _i442 < _set441.size; ++_i442)
          {
- long _elem443; // required
+ long _elem443; // optional
            _elem443 = iprot.readI64();
            struct.open_txns.add(_elem443);
          }

Search Discussions

  • Jdere at Apr 14, 2015 at 8:47 pm
    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPrincipalsInRoleResponse.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class GetPrincipalsInRoleResponse
                      struct.principalGrants = new ArrayList<RolePrincipalGrant>(_list86.size);
                      for (int _i87 = 0; _i87 < _list86.size; ++_i87)
                      {
    - RolePrincipalGrant _elem88; // required
    + RolePrincipalGrant _elem88; // optional
                        _elem88 = new RolePrincipalGrant();
                        _elem88.read(iprot);
                        struct.principalGrants.add(_elem88);
    @@ -425,7 +425,7 @@ public class GetPrincipalsInRoleResponse
              struct.principalGrants = new ArrayList<RolePrincipalGrant>(_list91.size);
              for (int _i92 = 0; _i92 < _list91.size; ++_i92)
              {
    - RolePrincipalGrant _elem93; // required
    + RolePrincipalGrant _elem93; // optional
                _elem93 = new RolePrincipalGrant();
                _elem93.read(iprot);
                struct.principalGrants.add(_elem93);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetRoleGrantsForPrincipalResponse.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class GetRoleGrantsForPrincipalRe
                      struct.principalGrants = new ArrayList<RolePrincipalGrant>(_list78.size);
                      for (int _i79 = 0; _i79 < _list78.size; ++_i79)
                      {
    - RolePrincipalGrant _elem80; // required
    + RolePrincipalGrant _elem80; // optional
                        _elem80 = new RolePrincipalGrant();
                        _elem80.read(iprot);
                        struct.principalGrants.add(_elem80);
    @@ -425,7 +425,7 @@ public class GetRoleGrantsForPrincipalRe
              struct.principalGrants = new ArrayList<RolePrincipalGrant>(_list83.size);
              for (int _i84 = 0; _i84 < _list83.size; ++_i84)
              {
    - RolePrincipalGrant _elem85; // required
    + RolePrincipalGrant _elem85; // optional
                _elem85 = new RolePrincipalGrant();
                _elem85.read(iprot);
                struct.principalGrants.add(_elem85);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeResponse.java Tue Apr 14 20:47:29 2015
    @@ -459,7 +459,7 @@ public class HeartbeatTxnRangeResponse i
                      struct.aborted = new HashSet<Long>(2*_set468.size);
                      for (int _i469 = 0; _i469 < _set468.size; ++_i469)
                      {
    - long _elem470; // required
    + long _elem470; // optional
                        _elem470 = iprot.readI64();
                        struct.aborted.add(_elem470);
                      }
    @@ -477,7 +477,7 @@ public class HeartbeatTxnRangeResponse i
                      struct.nosuch = new HashSet<Long>(2*_set471.size);
                      for (int _i472 = 0; _i472 < _set471.size; ++_i472)
                      {
    - long _elem473; // required
    + long _elem473; // optional
                        _elem473 = iprot.readI64();
                        struct.nosuch.add(_elem473);
                      }
    @@ -566,7 +566,7 @@ public class HeartbeatTxnRangeResponse i
              struct.aborted = new HashSet<Long>(2*_set478.size);
              for (int _i479 = 0; _i479 < _set478.size; ++_i479)
              {
    - long _elem480; // required
    + long _elem480; // optional
                _elem480 = iprot.readI64();
                struct.aborted.add(_elem480);
              }
    @@ -577,7 +577,7 @@ public class HeartbeatTxnRangeResponse i
              struct.nosuch = new HashSet<Long>(2*_set481.size);
              for (int _i482 = 0; _i482 < _set481.size; ++_i482)
              {
    - long _elem483; // required
    + long _elem483; // optional
                _elem483 = iprot.readI64();
                struct.nosuch.add(_elem483);
              }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java Tue Apr 14 20:47:29 2015
    @@ -710,7 +710,7 @@ public class HiveObjectRef implements or
                      struct.partValues = new ArrayList<String>(_list8.size);
                      for (int _i9 = 0; _i9 < _list8.size; ++_i9)
                      {
    - String _elem10; // required
    + String _elem10; // optional
                        _elem10 = iprot.readString();
                        struct.partValues.add(_elem10);
                      }
    @@ -853,7 +853,7 @@ public class HiveObjectRef implements or
                struct.partValues = new ArrayList<String>(_list13.size);
                for (int _i14 = 0; _i14 < _list13.size; ++_i14)
                {
    - String _elem15; // required
    + String _elem15; // optional
                  _elem15 = iprot.readString();
                  struct.partValues.add(_elem15);
                }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InsertEventRequestData.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class InsertEventRequestData impl
                      struct.filesAdded = new ArrayList<String>(_list508.size);
                      for (int _i509 = 0; _i509 < _list508.size; ++_i509)
                      {
    - String _elem510; // required
    + String _elem510; // optional
                        _elem510 = iprot.readString();
                        struct.filesAdded.add(_elem510);
                      }
    @@ -424,7 +424,7 @@ public class InsertEventRequestData impl
              struct.filesAdded = new ArrayList<String>(_list513.size);
              for (int _i514 = 0; _i514 < _list513.size; ++_i514)
              {
    - String _elem515; // required
    + String _elem515; // optional
                _elem515 = iprot.readString();
                struct.filesAdded.add(_elem515);
              }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/LockRequest.java Tue Apr 14 20:47:29 2015
    @@ -605,7 +605,7 @@ public class LockRequest implements org.
                      struct.component = new ArrayList<LockComponent>(_list452.size);
                      for (int _i453 = 0; _i453 < _list452.size; ++_i453)
                      {
    - LockComponent _elem454; // required
    + LockComponent _elem454; // optional
                        _elem454 = new LockComponent();
                        _elem454.read(iprot);
                        struct.component.add(_elem454);
    @@ -725,7 +725,7 @@ public class LockRequest implements org.
              struct.component = new ArrayList<LockComponent>(_list457.size);
              for (int _i458 = 0; _i458 < _list457.size; ++_i458)
              {
    - LockComponent _elem459; // required
    + LockComponent _elem459; // optional
                _elem459 = new LockComponent();
                _elem459.read(iprot);
                struct.component.add(_elem459);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEventResponse.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class NotificationEventResponse i
                      struct.events = new ArrayList<NotificationEvent>(_list500.size);
                      for (int _i501 = 0; _i501 < _list500.size; ++_i501)
                      {
    - NotificationEvent _elem502; // required
    + NotificationEvent _elem502; // optional
                        _elem502 = new NotificationEvent();
                        _elem502.read(iprot);
                        struct.events.add(_elem502);
    @@ -425,7 +425,7 @@ public class NotificationEventResponse i
              struct.events = new ArrayList<NotificationEvent>(_list505.size);
              for (int _i506 = 0; _i506 < _list505.size; ++_i506)
              {
    - NotificationEvent _elem507; // required
    + NotificationEvent _elem507; // optional
                _elem507 = new NotificationEvent();
                _elem507.read(iprot);
                struct.events.add(_elem507);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/OpenTxnsResponse.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class OpenTxnsResponse implements
                      struct.txn_ids = new ArrayList<Long>(_list444.size);
                      for (int _i445 = 0; _i445 < _list444.size; ++_i445)
                      {
    - long _elem446; // required
    + long _elem446; // optional
                        _elem446 = iprot.readI64();
                        struct.txn_ids.add(_elem446);
                      }
    @@ -424,7 +424,7 @@ public class OpenTxnsResponse implements
              struct.txn_ids = new ArrayList<Long>(_list449.size);
              for (int _i450 = 0; _i450 < _list449.size; ++_i450)
              {
    - long _elem451; // required
    + long _elem451; // optional
                _elem451 = iprot.readI64();
                struct.txn_ids.add(_elem451);
              }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java Tue Apr 14 20:47:29 2015
    @@ -182,14 +182,14 @@ public class Partition implements org.ap
        {
          this();
          this.values = values;
    - this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(dbName);
    - this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(tableName);
    + this.dbName = dbName;
    + this.tableName = tableName;
          this.createTime = createTime;
          setCreateTimeIsSet(true);
          this.lastAccessTime = lastAccessTime;
          setLastAccessTimeIsSet(true);
          this.sd = sd;
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.parameters = parameters;
        }

        /**
    @@ -205,10 +205,10 @@ public class Partition implements org.ap
            this.values = __this__values;
          }
          if (other.isSetDbName()) {
    - this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(other.dbName);
    + this.dbName = other.dbName;
          }
          if (other.isSetTableName()) {
    - this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(other.tableName);
    + this.tableName = other.tableName;
          }
          this.createTime = other.createTime;
          this.lastAccessTime = other.lastAccessTime;
    @@ -222,9 +222,9 @@ public class Partition implements org.ap
              String other_element_key = other_element.getKey();
              String other_element_value = other_element.getValue();

    - String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
    + String __this__parameters_copy_key = other_element_key;

    - String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
    + String __this__parameters_copy_value = other_element_value;

              __this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
            }
    @@ -296,7 +296,7 @@ public class Partition implements org.ap
        }

        public void setDbName(String dbName) {
    - this.dbName = org.apache.hive.common.util.HiveStringUtils.intern(dbName);
    + this.dbName = dbName;
        }

        public void unsetDbName() {
    @@ -319,7 +319,7 @@ public class Partition implements org.ap
        }

        public void setTableName(String tableName) {
    - this.tableName = org.apache.hive.common.util.HiveStringUtils.intern(tableName);
    + this.tableName = tableName;
        }

        public void unsetTableName() {
    @@ -420,7 +420,7 @@ public class Partition implements org.ap
        }

        public void setParameters(Map<String,String> parameters) {
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.parameters = parameters;
        }

        public void unsetParameters() {
    @@ -945,7 +945,7 @@ public class Partition implements org.ap
                      struct.values = new ArrayList<String>(_list208.size);
                      for (int _i209 = 0; _i209 < _list208.size; ++_i209)
                      {
    - String _elem210; // required
    + String _elem210; // optional
                        _elem210 = iprot.readString();
                        struct.values.add(_elem210);
                      }
    @@ -1184,7 +1184,7 @@ public class Partition implements org.ap
                struct.values = new ArrayList<String>(_list219.size);
                for (int _i220 = 0; _i220 < _list219.size; ++_i220)
                {
    - String _elem221; // required
    + String _elem221; // optional
                  _elem221 = iprot.readString();
                  struct.values.add(_elem221);
                }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionListComposingSpec.java Tue Apr 14 20:47:29 2015
    @@ -350,7 +350,7 @@ public class PartitionListComposingSpec
                      struct.partitions = new ArrayList<Partition>(_list252.size);
                      for (int _i253 = 0; _i253 < _list252.size; ++_i253)
                      {
    - Partition _elem254; // required
    + Partition _elem254; // optional
                        _elem254 = new Partition();
                        _elem254.read(iprot);
                        struct.partitions.add(_elem254);
    @@ -430,7 +430,7 @@ public class PartitionListComposingSpec
                struct.partitions = new ArrayList<Partition>(_list257.size);
                for (int _i258 = 0; _i258 < _list257.size; ++_i258)
                {
    - Partition _elem259; // required
    + Partition _elem259; // optional
                  _elem259 = new Partition();
                  _elem259.read(iprot);
                  struct.partitions.add(_elem259);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionSpecWithSharedSD.java Tue Apr 14 20:47:29 2015
    @@ -434,7 +434,7 @@ public class PartitionSpecWithSharedSD i
                      struct.partitions = new ArrayList<PartitionWithoutSD>(_list244.size);
                      for (int _i245 = 0; _i245 < _list244.size; ++_i245)
                      {
    - PartitionWithoutSD _elem246; // required
    + PartitionWithoutSD _elem246; // optional
                        _elem246 = new PartitionWithoutSD();
                        _elem246.read(iprot);
                        struct.partitions.add(_elem246);
    @@ -534,7 +534,7 @@ public class PartitionSpecWithSharedSD i
                struct.partitions = new ArrayList<PartitionWithoutSD>(_list249.size);
                for (int _i250 = 0; _i250 < _list249.size; ++_i250)
                {
    - PartitionWithoutSD _elem251; // required
    + PartitionWithoutSD _elem251; // optional
                  _elem251 = new PartitionWithoutSD();
                  _elem251.read(iprot);
                  struct.partitions.add(_elem251);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionWithoutSD.java Tue Apr 14 20:47:29 2015
    @@ -780,7 +780,7 @@ public class PartitionWithoutSD implemen
                      struct.values = new ArrayList<String>(_list226.size);
                      for (int _i227 = 0; _i227 < _list226.size; ++_i227)
                      {
    - String _elem228; // required
    + String _elem228; // optional
                        _elem228 = iprot.readString();
                        struct.values.add(_elem228);
                      }
    @@ -980,7 +980,7 @@ public class PartitionWithoutSD implemen
                struct.values = new ArrayList<String>(_list237.size);
                for (int _i238 = 0; _i238 < _list237.size; ++_i238)
                {
    - String _elem239; // required
    + String _elem239; // optional
                  _elem239 = iprot.readString();
                  struct.values.add(_elem239);
                }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsByExprResult.java Tue Apr 14 20:47:29 2015
    @@ -439,7 +439,7 @@ public class PartitionsByExprResult impl
                      struct.partitions = new ArrayList<Partition>(_list322.size);
                      for (int _i323 = 0; _i323 < _list322.size; ++_i323)
                      {
    - Partition _elem324; // required
    + Partition _elem324; // optional
                        _elem324 = new Partition();
                        _elem324.read(iprot);
                        struct.partitions.add(_elem324);
    @@ -522,7 +522,7 @@ public class PartitionsByExprResult impl
              struct.partitions = new ArrayList<Partition>(_list327.size);
              for (int _i328 = 0; _i328 < _list327.size; ++_i328)
              {
    - Partition _elem329; // required
    + Partition _elem329; // optional
                _elem329 = new Partition();
                _elem329.read(iprot);
                struct.partitions.add(_elem329);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsRequest.java Tue Apr 14 20:47:29 2015
    @@ -645,7 +645,7 @@ public class PartitionsStatsRequest impl
                      struct.colNames = new ArrayList<String>(_list364.size);
                      for (int _i365 = 0; _i365 < _list364.size; ++_i365)
                      {
    - String _elem366; // required
    + String _elem366; // optional
                        _elem366 = iprot.readString();
                        struct.colNames.add(_elem366);
                      }
    @@ -663,7 +663,7 @@ public class PartitionsStatsRequest impl
                      struct.partNames = new ArrayList<String>(_list367.size);
                      for (int _i368 = 0; _i368 < _list367.size; ++_i368)
                      {
    - String _elem369; // required
    + String _elem369; // optional
                        _elem369 = iprot.readString();
                        struct.partNames.add(_elem369);
                      }
    @@ -768,7 +768,7 @@ public class PartitionsStatsRequest impl
              struct.colNames = new ArrayList<String>(_list374.size);
              for (int _i375 = 0; _i375 < _list374.size; ++_i375)
              {
    - String _elem376; // required
    + String _elem376; // optional
                _elem376 = iprot.readString();
                struct.colNames.add(_elem376);
              }
    @@ -779,7 +779,7 @@ public class PartitionsStatsRequest impl
              struct.partNames = new ArrayList<String>(_list377.size);
              for (int _i378 = 0; _i378 < _list377.size; ++_i378)
              {
    - String _elem379; // required
    + String _elem379; // optional
                _elem379 = iprot.readString();
                struct.partNames.add(_elem379);
              }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PartitionsStatsResult.java Tue Apr 14 20:47:29 2015
    @@ -371,7 +371,7 @@ public class PartitionsStatsResult imple
                          _val341 = new ArrayList<ColumnStatisticsObj>(_list342.size);
                          for (int _i343 = 0; _i343 < _list342.size; ++_i343)
                          {
    - ColumnStatisticsObj _elem344; // required
    + ColumnStatisticsObj _elem344; // optional
                            _elem344 = new ColumnStatisticsObj();
                            _elem344.read(iprot);
                            _val341.add(_elem344);
    @@ -469,7 +469,7 @@ public class PartitionsStatsResult imple
                  _val352 = new ArrayList<ColumnStatisticsObj>(_list353.size);
                  for (int _i354 = 0; _i354 < _list353.size; ++_i354)
                  {
    - ColumnStatisticsObj _elem355; // required
    + ColumnStatisticsObj _elem355; // optional
                    _elem355 = new ColumnStatisticsObj();
                    _elem355.read(iprot);
                    _val352.add(_elem355);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java Tue Apr 14 20:47:29 2015
    @@ -587,7 +587,7 @@ public class PrincipalPrivilegeSet imple
                          _val27 = new ArrayList<PrivilegeGrantInfo>(_list28.size);
                          for (int _i29 = 0; _i29 < _list28.size; ++_i29)
                          {
    - PrivilegeGrantInfo _elem30; // required
    + PrivilegeGrantInfo _elem30; // optional
                            _elem30 = new PrivilegeGrantInfo();
                            _elem30.read(iprot);
                            _val27.add(_elem30);
    @@ -618,7 +618,7 @@ public class PrincipalPrivilegeSet imple
                          _val34 = new ArrayList<PrivilegeGrantInfo>(_list35.size);
                          for (int _i36 = 0; _i36 < _list35.size; ++_i36)
                          {
    - PrivilegeGrantInfo _elem37; // required
    + PrivilegeGrantInfo _elem37; // optional
                            _elem37 = new PrivilegeGrantInfo();
                            _elem37.read(iprot);
                            _val34.add(_elem37);
    @@ -649,7 +649,7 @@ public class PrincipalPrivilegeSet imple
                          _val41 = new ArrayList<PrivilegeGrantInfo>(_list42.size);
                          for (int _i43 = 0; _i43 < _list42.size; ++_i43)
                          {
    - PrivilegeGrantInfo _elem44; // required
    + PrivilegeGrantInfo _elem44; // optional
                            _elem44 = new PrivilegeGrantInfo();
                            _elem44.read(iprot);
                            _val41.add(_elem44);
    @@ -834,7 +834,7 @@ public class PrincipalPrivilegeSet imple
                    _val60 = new ArrayList<PrivilegeGrantInfo>(_list61.size);
                    for (int _i62 = 0; _i62 < _list61.size; ++_i62)
                    {
    - PrivilegeGrantInfo _elem63; // required
    + PrivilegeGrantInfo _elem63; // optional
                      _elem63 = new PrivilegeGrantInfo();
                      _elem63.read(iprot);
                      _val60.add(_elem63);
    @@ -859,7 +859,7 @@ public class PrincipalPrivilegeSet imple
                    _val67 = new ArrayList<PrivilegeGrantInfo>(_list68.size);
                    for (int _i69 = 0; _i69 < _list68.size; ++_i69)
                    {
    - PrivilegeGrantInfo _elem70; // required
    + PrivilegeGrantInfo _elem70; // optional
                      _elem70 = new PrivilegeGrantInfo();
                      _elem70.read(iprot);
                      _val67.add(_elem70);
    @@ -884,7 +884,7 @@ public class PrincipalPrivilegeSet imple
                    _val74 = new ArrayList<PrivilegeGrantInfo>(_list75.size);
                    for (int _i76 = 0; _i76 < _list75.size; ++_i76)
                    {
    - PrivilegeGrantInfo _elem77; // required
    + PrivilegeGrantInfo _elem77; // optional
                      _elem77 = new PrivilegeGrantInfo();
                      _elem77.read(iprot);
                      _val74.add(_elem77);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java Tue Apr 14 20:47:29 2015
    @@ -350,7 +350,7 @@ public class PrivilegeBag implements org
                      struct.privileges = new ArrayList<HiveObjectPrivilege>(_list16.size);
                      for (int _i17 = 0; _i17 < _list16.size; ++_i17)
                      {
    - HiveObjectPrivilege _elem18; // required
    + HiveObjectPrivilege _elem18; // optional
                        _elem18 = new HiveObjectPrivilege();
                        _elem18.read(iprot);
                        struct.privileges.add(_elem18);
    @@ -430,7 +430,7 @@ public class PrivilegeBag implements org
                struct.privileges = new ArrayList<HiveObjectPrivilege>(_list21.size);
                for (int _i22 = 0; _i22 < _list21.size; ++_i22)
                {
    - HiveObjectPrivilege _elem23; // required
    + HiveObjectPrivilege _elem23; // optional
                  _elem23 = new HiveObjectPrivilege();
                  _elem23.read(iprot);
                  struct.privileges.add(_elem23);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/RequestPartsSpec.java Tue Apr 14 20:47:29 2015
    @@ -169,7 +169,7 @@ public class RequestPartsSpec extends or
                    names = new ArrayList<String>(_list404.size);
                    for (int _i405 = 0; _i405 < _list404.size; ++_i405)
                    {
    - String _elem406; // required
    + String _elem406; // optional
                      _elem406 = iprot.readString();
                      names.add(_elem406);
                    }
    @@ -188,7 +188,7 @@ public class RequestPartsSpec extends or
                    exprs = new ArrayList<DropPartitionsExpr>(_list407.size);
                    for (int _i408 = 0; _i408 < _list407.size; ++_i408)
                    {
    - DropPartitionsExpr _elem409; // required
    + DropPartitionsExpr _elem409; // optional
                      _elem409 = new DropPartitionsExpr();
                      _elem409.read(iprot);
                      exprs.add(_elem409);
    @@ -250,7 +250,7 @@ public class RequestPartsSpec extends or
                  names = new ArrayList<String>(_list412.size);
                  for (int _i413 = 0; _i413 < _list412.size; ++_i413)
                  {
    - String _elem414; // required
    + String _elem414; // optional
                    _elem414 = iprot.readString();
                    names.add(_elem414);
                  }
    @@ -264,7 +264,7 @@ public class RequestPartsSpec extends or
                  exprs = new ArrayList<DropPartitionsExpr>(_list415.size);
                  for (int _i416 = 0; _i416 < _list415.size; ++_i416)
                  {
    - DropPartitionsExpr _elem417; // required
    + DropPartitionsExpr _elem417; // optional
                    _elem417 = new DropPartitionsExpr();
                    _elem417.read(iprot);
                    exprs.add(_elem417);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java Tue Apr 14 20:47:29 2015
    @@ -456,7 +456,7 @@ public class Schema implements org.apach
                      struct.fieldSchemas = new ArrayList<FieldSchema>(_list294.size);
                      for (int _i295 = 0; _i295 < _list294.size; ++_i295)
                      {
    - FieldSchema _elem296; // required
    + FieldSchema _elem296; // optional
                        _elem296 = new FieldSchema();
                        _elem296.read(iprot);
                        struct.fieldSchemas.add(_elem296);
    @@ -582,7 +582,7 @@ public class Schema implements org.apach
                struct.fieldSchemas = new ArrayList<FieldSchema>(_list305.size);
                for (int _i306 = 0; _i306 < _list305.size; ++_i306)
                {
    - FieldSchema _elem307; // required
    + FieldSchema _elem307; // optional
                  _elem307 = new FieldSchema();
                  _elem307.read(iprot);
                  struct.fieldSchemas.add(_elem307);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java Tue Apr 14 20:47:29 2015
    @@ -137,9 +137,9 @@ public class SerDeInfo implements org.ap
          Map<String,String> parameters)
        {
          this();
    - this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
    - this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(serializationLib);
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.name = name;
    + this.serializationLib = serializationLib;
    + this.parameters = parameters;
        }

        /**
    @@ -147,10 +147,10 @@ public class SerDeInfo implements org.ap
         */
        public SerDeInfo(SerDeInfo other) {
          if (other.isSetName()) {
    - this.name = org.apache.hive.common.util.HiveStringUtils.intern(other.name);
    + this.name = other.name;
          }
          if (other.isSetSerializationLib()) {
    - this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(other.serializationLib);
    + this.serializationLib = other.serializationLib;
          }
          if (other.isSetParameters()) {
            Map<String,String> __this__parameters = new HashMap<String,String>();
    @@ -159,9 +159,9 @@ public class SerDeInfo implements org.ap
              String other_element_key = other_element.getKey();
              String other_element_value = other_element.getValue();

    - String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
    + String __this__parameters_copy_key = other_element_key;

    - String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
    + String __this__parameters_copy_value = other_element_value;

              __this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
            }
    @@ -185,7 +185,7 @@ public class SerDeInfo implements org.ap
        }

        public void setName(String name) {
    - this.name = org.apache.hive.common.util.HiveStringUtils.intern(name);
    + this.name = name;
        }

        public void unsetName() {
    @@ -208,7 +208,7 @@ public class SerDeInfo implements org.ap
        }

        public void setSerializationLib(String serializationLib) {
    - this.serializationLib = org.apache.hive.common.util.HiveStringUtils.intern(serializationLib);
    + this.serializationLib = serializationLib;
        }

        public void unsetSerializationLib() {
    @@ -242,7 +242,7 @@ public class SerDeInfo implements org.ap
        }

        public void setParameters(Map<String,String> parameters) {
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.parameters = parameters;
        }

        public void unsetParameters() {

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SetPartitionsStatsRequest.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class SetPartitionsStatsRequest i
                      struct.colStats = new ArrayList<ColumnStatistics>(_list286.size);
                      for (int _i287 = 0; _i287 < _list286.size; ++_i287)
                      {
    - ColumnStatistics _elem288; // required
    + ColumnStatistics _elem288; // optional
                        _elem288 = new ColumnStatistics();
                        _elem288.read(iprot);
                        struct.colStats.add(_elem288);
    @@ -425,7 +425,7 @@ public class SetPartitionsStatsRequest i
              struct.colStats = new ArrayList<ColumnStatistics>(_list291.size);
              for (int _i292 = 0; _i292 < _list291.size; ++_i292)
              {
    - ColumnStatistics _elem293; // required
    + ColumnStatistics _elem293; // optional
                _elem293 = new ColumnStatistics();
                _elem293.read(iprot);
                struct.colStats.add(_elem293);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowCompactResponse.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class ShowCompactResponse impleme
                      struct.compacts = new ArrayList<ShowCompactResponseElement>(_list484.size);
                      for (int _i485 = 0; _i485 < _list484.size; ++_i485)
                      {
    - ShowCompactResponseElement _elem486; // required
    + ShowCompactResponseElement _elem486; // optional
                        _elem486 = new ShowCompactResponseElement();
                        _elem486.read(iprot);
                        struct.compacts.add(_elem486);
    @@ -425,7 +425,7 @@ public class ShowCompactResponse impleme
              struct.compacts = new ArrayList<ShowCompactResponseElement>(_list489.size);
              for (int _i490 = 0; _i490 < _list489.size; ++_i490)
              {
    - ShowCompactResponseElement _elem491; // required
    + ShowCompactResponseElement _elem491; // optional
                _elem491 = new ShowCompactResponseElement();
                _elem491.read(iprot);
                struct.compacts.add(_elem491);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ShowLocksResponse.java Tue Apr 14 20:47:29 2015
    @@ -350,7 +350,7 @@ public class ShowLocksResponse implement
                      struct.locks = new ArrayList<ShowLocksResponseElement>(_list460.size);
                      for (int _i461 = 0; _i461 < _list460.size; ++_i461)
                      {
    - ShowLocksResponseElement _elem462; // required
    + ShowLocksResponseElement _elem462; // optional
                        _elem462 = new ShowLocksResponseElement();
                        _elem462.read(iprot);
                        struct.locks.add(_elem462);
    @@ -430,7 +430,7 @@ public class ShowLocksResponse implement
                struct.locks = new ArrayList<ShowLocksResponseElement>(_list465.size);
                for (int _i466 = 0; _i466 < _list465.size; ++_i466)
                {
    - ShowLocksResponseElement _elem467; // required
    + ShowLocksResponseElement _elem467; // optional
                  _elem467 = new ShowLocksResponseElement();
                  _elem467.read(iprot);
                  struct.locks.add(_elem467);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java Tue Apr 14 20:47:29 2015
    @@ -566,7 +566,7 @@ public class SkewedInfo implements org.a
                      struct.skewedColNames = new ArrayList<String>(_list114.size);
                      for (int _i115 = 0; _i115 < _list114.size; ++_i115)
                      {
    - String _elem116; // required
    + String _elem116; // optional
                        _elem116 = iprot.readString();
                        struct.skewedColNames.add(_elem116);
                      }
    @@ -584,13 +584,13 @@ public class SkewedInfo implements org.a
                      struct.skewedColValues = new ArrayList<List<String>>(_list117.size);
                      for (int _i118 = 0; _i118 < _list117.size; ++_i118)
                      {
    - List<String> _elem119; // required
    + List<String> _elem119; // optional
                        {
                          org.apache.thrift.protocol.TList _list120 = iprot.readListBegin();
                          _elem119 = new ArrayList<String>(_list120.size);
                          for (int _i121 = 0; _i121 < _list120.size; ++_i121)
                          {
    - String _elem122; // required
    + String _elem122; // optional
                            _elem122 = iprot.readString();
                            _elem119.add(_elem122);
                          }
    @@ -619,7 +619,7 @@ public class SkewedInfo implements org.a
                          _key125 = new ArrayList<String>(_list127.size);
                          for (int _i128 = 0; _i128 < _list127.size; ++_i128)
                          {
    - String _elem129; // required
    + String _elem129; // optional
                            _elem129 = iprot.readString();
                            _key125.add(_elem129);
                          }
    @@ -779,7 +779,7 @@ public class SkewedInfo implements org.a
                struct.skewedColNames = new ArrayList<String>(_list140.size);
                for (int _i141 = 0; _i141 < _list140.size; ++_i141)
                {
    - String _elem142; // required
    + String _elem142; // optional
                  _elem142 = iprot.readString();
                  struct.skewedColNames.add(_elem142);
                }
    @@ -792,13 +792,13 @@ public class SkewedInfo implements org.a
                struct.skewedColValues = new ArrayList<List<String>>(_list143.size);
                for (int _i144 = 0; _i144 < _list143.size; ++_i144)
                {
    - List<String> _elem145; // required
    + List<String> _elem145; // optional
                  {
                    org.apache.thrift.protocol.TList _list146 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
                    _elem145 = new ArrayList<String>(_list146.size);
                    for (int _i147 = 0; _i147 < _list146.size; ++_i147)
                    {
    - String _elem148; // required
    + String _elem148; // optional
                      _elem148 = iprot.readString();
                      _elem145.add(_elem148);
                    }
    @@ -821,7 +821,7 @@ public class SkewedInfo implements org.a
                    _key151 = new ArrayList<String>(_list153.size);
                    for (int _i154 = 0; _i154 < _list153.size; ++_i154)
                    {
    - String _elem155; // required
    + String _elem155; // optional
                      _elem155 = iprot.readString();
                      _key151.add(_elem155);
                    }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java Tue Apr 14 20:47:29 2015
    @@ -216,17 +216,17 @@ public class StorageDescriptor implement
        {
          this();
          this.cols = cols;
    - this.location = org.apache.hive.common.util.HiveStringUtils.intern(location);
    - this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(inputFormat);
    - this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(outputFormat);
    + this.location = location;
    + this.inputFormat = inputFormat;
    + this.outputFormat = outputFormat;
          this.compressed = compressed;
          setCompressedIsSet(true);
          this.numBuckets = numBuckets;
          setNumBucketsIsSet(true);
          this.serdeInfo = serdeInfo;
    - this.bucketCols = org.apache.hive.common.util.HiveStringUtils.intern(bucketCols);
    + this.bucketCols = bucketCols;
          this.sortCols = sortCols;
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.parameters = parameters;
        }

        /**
    @@ -242,13 +242,13 @@ public class StorageDescriptor implement
            this.cols = __this__cols;
          }
          if (other.isSetLocation()) {
    - this.location = org.apache.hive.common.util.HiveStringUtils.intern(other.location);
    + this.location = other.location;
          }
          if (other.isSetInputFormat()) {
    - this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(other.inputFormat);
    + this.inputFormat = other.inputFormat;
          }
          if (other.isSetOutputFormat()) {
    - this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(other.outputFormat);
    + this.outputFormat = other.outputFormat;
          }
          this.compressed = other.compressed;
          this.numBuckets = other.numBuckets;
    @@ -276,9 +276,9 @@ public class StorageDescriptor implement
              String other_element_key = other_element.getKey();
              String other_element_value = other_element.getValue();

    - String __this__parameters_copy_key = org.apache.hive.common.util.HiveStringUtils.intern(other_element_key);
    + String __this__parameters_copy_key = other_element_key;

    - String __this__parameters_copy_value = org.apache.hive.common.util.HiveStringUtils.intern(other_element_value);
    + String __this__parameters_copy_value = other_element_value;

              __this__parameters.put(__this__parameters_copy_key, __this__parameters_copy_value);
            }
    @@ -356,7 +356,7 @@ public class StorageDescriptor implement
        }

        public void setLocation(String location) {
    - this.location = org.apache.hive.common.util.HiveStringUtils.intern(location);
    + this.location = location;
        }

        public void unsetLocation() {
    @@ -379,7 +379,7 @@ public class StorageDescriptor implement
        }

        public void setInputFormat(String inputFormat) {
    - this.inputFormat = org.apache.hive.common.util.HiveStringUtils.intern(inputFormat);
    + this.inputFormat = inputFormat;
        }

        public void unsetInputFormat() {
    @@ -402,7 +402,7 @@ public class StorageDescriptor implement
        }

        public void setOutputFormat(String outputFormat) {
    - this.outputFormat = org.apache.hive.common.util.HiveStringUtils.intern(outputFormat);
    + this.outputFormat = outputFormat;
        }

        public void unsetOutputFormat() {
    @@ -507,7 +507,7 @@ public class StorageDescriptor implement
        }

        public void setBucketCols(List<String> bucketCols) {
    - this.bucketCols = org.apache.hive.common.util.HiveStringUtils.intern(bucketCols);
    + this.bucketCols = bucketCols;
        }

        public void unsetBucketCols() {
    @@ -579,7 +579,7 @@ public class StorageDescriptor implement
        }

        public void setParameters(Map<String,String> parameters) {
    - this.parameters = org.apache.hive.common.util.HiveStringUtils.intern(parameters);
    + this.parameters = parameters;
        }

        public void unsetParameters() {
    @@ -1304,7 +1304,7 @@ public class StorageDescriptor implement
                      struct.cols = new ArrayList<FieldSchema>(_list156.size);
                      for (int _i157 = 0; _i157 < _list156.size; ++_i157)
                      {
    - FieldSchema _elem158; // required
    + FieldSchema _elem158; // optional
                        _elem158 = new FieldSchema();
                        _elem158.read(iprot);
                        struct.cols.add(_elem158);
    @@ -1372,7 +1372,7 @@ public class StorageDescriptor implement
                      struct.bucketCols = new ArrayList<String>(_list159.size);
                      for (int _i160 = 0; _i160 < _list159.size; ++_i160)
                      {
    - String _elem161; // required
    + String _elem161; // optional
                        _elem161 = iprot.readString();
                        struct.bucketCols.add(_elem161);
                      }
    @@ -1390,7 +1390,7 @@ public class StorageDescriptor implement
                      struct.sortCols = new ArrayList<Order>(_list162.size);
                      for (int _i163 = 0; _i163 < _list162.size; ++_i163)
                      {
    - Order _elem164; // required
    + Order _elem164; // optional
                        _elem164 = new Order();
                        _elem164.read(iprot);
                        struct.sortCols.add(_elem164);
    @@ -1667,7 +1667,7 @@ public class StorageDescriptor implement
                struct.cols = new ArrayList<FieldSchema>(_list177.size);
                for (int _i178 = 0; _i178 < _list177.size; ++_i178)
                {
    - FieldSchema _elem179; // required
    + FieldSchema _elem179; // optional
                  _elem179 = new FieldSchema();
                  _elem179.read(iprot);
                  struct.cols.add(_elem179);
    @@ -1706,7 +1706,7 @@ public class StorageDescriptor implement
                struct.bucketCols = new ArrayList<String>(_list180.size);
                for (int _i181 = 0; _i181 < _list180.size; ++_i181)
                {
    - String _elem182; // required
    + String _elem182; // optional
                  _elem182 = iprot.readString();
                  struct.bucketCols.add(_elem182);
                }
    @@ -1719,7 +1719,7 @@ public class StorageDescriptor implement
                struct.sortCols = new ArrayList<Order>(_list183.size);
                for (int _i184 = 0; _i184 < _list183.size; ++_i184)
                {
    - Order _elem185; // required
    + Order _elem185; // optional
                  _elem185 = new Order();
                  _elem185.read(iprot);
                  struct.sortCols.add(_elem185);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java Tue Apr 14 20:47:29 2015
    @@ -1481,7 +1481,7 @@ public class Table implements org.apache
                      struct.partitionKeys = new ArrayList<FieldSchema>(_list190.size);
                      for (int _i191 = 0; _i191 < _list190.size; ++_i191)
                      {
    - FieldSchema _elem192; // required
    + FieldSchema _elem192; // optional
                        _elem192 = new FieldSchema();
                        _elem192.read(iprot);
                        struct.partitionKeys.add(_elem192);
    @@ -1805,7 +1805,7 @@ public class Table implements org.apache
                struct.partitionKeys = new ArrayList<FieldSchema>(_list201.size);
                for (int _i202 = 0; _i202 < _list201.size; ++_i202)
                {
    - FieldSchema _elem203; // required
    + FieldSchema _elem203; // optional
                  _elem203 = new FieldSchema();
                  _elem203.read(iprot);
                  struct.partitionKeys.add(_elem203);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java Tue Apr 14 20:47:29 2015
    @@ -540,7 +540,7 @@ public class TableStatsRequest implement
                      struct.colNames = new ArrayList<String>(_list356.size);
                      for (int _i357 = 0; _i357 < _list356.size; ++_i357)
                      {
    - String _elem358; // required
    + String _elem358; // optional
                        _elem358 = iprot.readString();
                        struct.colNames.add(_elem358);
                      }
    @@ -626,7 +626,7 @@ public class TableStatsRequest implement
              struct.colNames = new ArrayList<String>(_list361.size);
              for (int _i362 = 0; _i362 < _list361.size; ++_i362)
              {
    - String _elem363; // required
    + String _elem363; // optional
                _elem363 = iprot.readString();
                struct.colNames.add(_elem363);
              }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsResult.java Tue Apr 14 20:47:29 2015
    @@ -354,7 +354,7 @@ public class TableStatsResult implements
                      struct.tableStats = new ArrayList<ColumnStatisticsObj>(_list330.size);
                      for (int _i331 = 0; _i331 < _list330.size; ++_i331)
                      {
    - ColumnStatisticsObj _elem332; // required
    + ColumnStatisticsObj _elem332; // optional
                        _elem332 = new ColumnStatisticsObj();
                        _elem332.read(iprot);
                        struct.tableStats.add(_elem332);
    @@ -425,7 +425,7 @@ public class TableStatsResult implements
              struct.tableStats = new ArrayList<ColumnStatisticsObj>(_list335.size);
              for (int _i336 = 0; _i336 < _list335.size; ++_i336)
              {
    - ColumnStatisticsObj _elem337; // required
    + ColumnStatisticsObj _elem337; // optional
                _elem337 = new ColumnStatisticsObj();
                _elem337.read(iprot);
                struct.tableStats.add(_elem337);
  • Jdere at Apr 14, 2015 at 8:47 pm
    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Tue Apr 14 20:47:29 2015
    @@ -17328,7 +17328,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list524.size);
                        for (int _i525 = 0; _i525 < _list524.size; ++_i525)
                        {
    - String _elem526; // required
    + String _elem526; // optional
                          _elem526 = iprot.readString();
                          struct.success.add(_elem526);
                        }
    @@ -17427,7 +17427,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list529.size);
                  for (int _i530 = 0; _i530 < _list529.size; ++_i530)
                  {
    - String _elem531; // required
    + String _elem531; // optional
                    _elem531 = iprot.readString();
                    struct.success.add(_elem531);
                  }
    @@ -18090,7 +18090,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list532.size);
                        for (int _i533 = 0; _i533 < _list532.size; ++_i533)
                        {
    - String _elem534; // required
    + String _elem534; // optional
                          _elem534 = iprot.readString();
                          struct.success.add(_elem534);
                        }
    @@ -18189,7 +18189,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list537.size);
                  for (int _i538 = 0; _i538 < _list537.size; ++_i538)
                  {
    - String _elem539; // required
    + String _elem539; // optional
                    _elem539 = iprot.readString();
                    struct.success.add(_elem539);
                  }
    @@ -23950,7 +23950,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<FieldSchema>(_list550.size);
                        for (int _i551 = 0; _i551 < _list550.size; ++_i551)
                        {
    - FieldSchema _elem552; // required
    + FieldSchema _elem552; // optional
                          _elem552 = new FieldSchema();
                          _elem552.read(iprot);
                          struct.success.add(_elem552);
    @@ -24090,7 +24090,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<FieldSchema>(_list555.size);
                  for (int _i556 = 0; _i556 < _list555.size; ++_i556)
                  {
    - FieldSchema _elem557; // required
    + FieldSchema _elem557; // optional
                    _elem557 = new FieldSchema();
                    _elem557.read(iprot);
                    struct.success.add(_elem557);
    @@ -25251,7 +25251,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<FieldSchema>(_list558.size);
                        for (int _i559 = 0; _i559 < _list558.size; ++_i559)
                        {
    - FieldSchema _elem560; // required
    + FieldSchema _elem560; // optional
                          _elem560 = new FieldSchema();
                          _elem560.read(iprot);
                          struct.success.add(_elem560);
    @@ -25391,7 +25391,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<FieldSchema>(_list563.size);
                  for (int _i564 = 0; _i564 < _list563.size; ++_i564)
                  {
    - FieldSchema _elem565; // required
    + FieldSchema _elem565; // optional
                    _elem565 = new FieldSchema();
                    _elem565.read(iprot);
                    struct.success.add(_elem565);
    @@ -26443,7 +26443,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<FieldSchema>(_list566.size);
                        for (int _i567 = 0; _i567 < _list566.size; ++_i567)
                        {
    - FieldSchema _elem568; // required
    + FieldSchema _elem568; // optional
                          _elem568 = new FieldSchema();
                          _elem568.read(iprot);
                          struct.success.add(_elem568);
    @@ -26583,7 +26583,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<FieldSchema>(_list571.size);
                  for (int _i572 = 0; _i572 < _list571.size; ++_i572)
                  {
    - FieldSchema _elem573; // required
    + FieldSchema _elem573; // optional
                    _elem573 = new FieldSchema();
                    _elem573.read(iprot);
                    struct.success.add(_elem573);
    @@ -27744,7 +27744,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<FieldSchema>(_list574.size);
                        for (int _i575 = 0; _i575 < _list574.size; ++_i575)
                        {
    - FieldSchema _elem576; // required
    + FieldSchema _elem576; // optional
                          _elem576 = new FieldSchema();
                          _elem576.read(iprot);
                          struct.success.add(_elem576);
    @@ -27884,7 +27884,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<FieldSchema>(_list579.size);
                  for (int _i580 = 0; _i580 < _list579.size; ++_i580)
                  {
    - FieldSchema _elem581; // required
    + FieldSchema _elem581; // optional
                    _elem581 = new FieldSchema();
                    _elem581.read(iprot);
                    struct.success.add(_elem581);
    @@ -33134,7 +33134,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list582.size);
                        for (int _i583 = 0; _i583 < _list582.size; ++_i583)
                        {
    - String _elem584; // required
    + String _elem584; // optional
                          _elem584 = iprot.readString();
                          struct.success.add(_elem584);
                        }
    @@ -33233,7 +33233,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list587.size);
                  for (int _i588 = 0; _i588 < _list587.size; ++_i588)
                  {
    - String _elem589; // required
    + String _elem589; // optional
                    _elem589 = iprot.readString();
                    struct.success.add(_elem589);
                  }
    @@ -34008,7 +34008,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list590.size);
                        for (int _i591 = 0; _i591 < _list590.size; ++_i591)
                        {
    - String _elem592; // required
    + String _elem592; // optional
                          _elem592 = iprot.readString();
                          struct.success.add(_elem592);
                        }
    @@ -34107,7 +34107,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list595.size);
                  for (int _i596 = 0; _i596 < _list595.size; ++_i596)
                  {
    - String _elem597; // required
    + String _elem597; // optional
                    _elem597 = iprot.readString();
                    struct.success.add(_elem597);
                  }
    @@ -35569,7 +35569,7 @@ public class ThriftHiveMetastore {
                        struct.tbl_names = new ArrayList<String>(_list598.size);
                        for (int _i599 = 0; _i599 < _list598.size; ++_i599)
                        {
    - String _elem600; // required
    + String _elem600; // optional
                          _elem600 = iprot.readString();
                          struct.tbl_names.add(_elem600);
                        }
    @@ -35663,7 +35663,7 @@ public class ThriftHiveMetastore {
                  struct.tbl_names = new ArrayList<String>(_list603.size);
                  for (int _i604 = 0; _i604 < _list603.size; ++_i604)
                  {
    - String _elem605; // required
    + String _elem605; // optional
                    _elem605 = iprot.readString();
                    struct.tbl_names.add(_elem605);
                  }
    @@ -36237,7 +36237,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Table>(_list606.size);
                        for (int _i607 = 0; _i607 < _list606.size; ++_i607)
                        {
    - Table _elem608; // required
    + Table _elem608; // optional
                          _elem608 = new Table();
                          _elem608.read(iprot);
                          struct.success.add(_elem608);
    @@ -36377,7 +36377,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Table>(_list611.size);
                  for (int _i612 = 0; _i612 < _list611.size; ++_i612)
                  {
    - Table _elem613; // required
    + Table _elem613; // optional
                    _elem613 = new Table();
                    _elem613.read(iprot);
                    struct.success.add(_elem613);
    @@ -37533,7 +37533,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list614.size);
                        for (int _i615 = 0; _i615 < _list614.size; ++_i615)
                        {
    - String _elem616; // required
    + String _elem616; // optional
                          _elem616 = iprot.readString();
                          struct.success.add(_elem616);
                        }
    @@ -37672,7 +37672,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list619.size);
                  for (int _i620 = 0; _i620 < _list619.size; ++_i620)
                  {
    - String _elem621; // required
    + String _elem621; // optional
                    _elem621 = iprot.readString();
                    struct.success.add(_elem621);
                  }
    @@ -43537,7 +43537,7 @@ public class ThriftHiveMetastore {
                        struct.new_parts = new ArrayList<Partition>(_list622.size);
                        for (int _i623 = 0; _i623 < _list622.size; ++_i623)
                        {
    - Partition _elem624; // required
    + Partition _elem624; // optional
                          _elem624 = new Partition();
                          _elem624.read(iprot);
                          struct.new_parts.add(_elem624);
    @@ -43617,7 +43617,7 @@ public class ThriftHiveMetastore {
                  struct.new_parts = new ArrayList<Partition>(_list627.size);
                  for (int _i628 = 0; _i628 < _list627.size; ++_i628)
                  {
    - Partition _elem629; // required
    + Partition _elem629; // optional
                    _elem629 = new Partition();
                    _elem629.read(iprot);
                    struct.new_parts.add(_elem629);
    @@ -44625,7 +44625,7 @@ public class ThriftHiveMetastore {
                        struct.new_parts = new ArrayList<PartitionSpec>(_list630.size);
                        for (int _i631 = 0; _i631 < _list630.size; ++_i631)
                        {
    - PartitionSpec _elem632; // required
    + PartitionSpec _elem632; // optional
                          _elem632 = new PartitionSpec();
                          _elem632.read(iprot);
                          struct.new_parts.add(_elem632);
    @@ -44705,7 +44705,7 @@ public class ThriftHiveMetastore {
                  struct.new_parts = new ArrayList<PartitionSpec>(_list635.size);
                  for (int _i636 = 0; _i636 < _list635.size; ++_i636)
                  {
    - PartitionSpec _elem637; // required
    + PartitionSpec _elem637; // optional
                    _elem637 = new PartitionSpec();
                    _elem637.read(iprot);
                    struct.new_parts.add(_elem637);
    @@ -45891,7 +45891,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list638.size);
                        for (int _i639 = 0; _i639 < _list638.size; ++_i639)
                        {
    - String _elem640; // required
    + String _elem640; // optional
                          _elem640 = iprot.readString();
                          struct.part_vals.add(_elem640);
                        }
    @@ -46000,7 +46000,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list643.size);
                  for (int _i644 = 0; _i644 < _list643.size; ++_i644)
                  {
    - String _elem645; // required
    + String _elem645; // optional
                    _elem645 = iprot.readString();
                    struct.part_vals.add(_elem645);
                  }
    @@ -48318,7 +48318,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list646.size);
                        for (int _i647 = 0; _i647 < _list646.size; ++_i647)
                        {
    - String _elem648; // required
    + String _elem648; // optional
                          _elem648 = iprot.readString();
                          struct.part_vals.add(_elem648);
                        }
    @@ -48447,7 +48447,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list651.size);
                  for (int _i652 = 0; _i652 < _list651.size; ++_i652)
                  {
    - String _elem653; // required
    + String _elem653; // optional
                    _elem653 = iprot.readString();
                    struct.part_vals.add(_elem653);
                  }
    @@ -52326,7 +52326,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list654.size);
                        for (int _i655 = 0; _i655 < _list654.size; ++_i655)
                        {
    - String _elem656; // required
    + String _elem656; // optional
                          _elem656 = iprot.readString();
                          struct.part_vals.add(_elem656);
                        }
    @@ -52452,7 +52452,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list659.size);
                  for (int _i660 = 0; _i660 < _list659.size; ++_i660)
                  {
    - String _elem661; // required
    + String _elem661; // optional
                    _elem661 = iprot.readString();
                    struct.part_vals.add(_elem661);
                  }
    @@ -53700,7 +53700,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list662.size);
                        for (int _i663 = 0; _i663 < _list662.size; ++_i663)
                        {
    - String _elem664; // required
    + String _elem664; // optional
                          _elem664 = iprot.readString();
                          struct.part_vals.add(_elem664);
                        }
    @@ -53846,7 +53846,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list667.size);
                  for (int _i668 = 0; _i668 < _list667.size; ++_i668)
                  {
    - String _elem669; // required
    + String _elem669; // optional
                    _elem669 = iprot.readString();
                    struct.part_vals.add(_elem669);
                  }
    @@ -58457,7 +58457,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list670.size);
                        for (int _i671 = 0; _i671 < _list670.size; ++_i671)
                        {
    - String _elem672; // required
    + String _elem672; // optional
                          _elem672 = iprot.readString();
                          struct.part_vals.add(_elem672);
                        }
    @@ -58566,7 +58566,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list675.size);
                  for (int _i676 = 0; _i676 < _list675.size; ++_i676)
                  {
    - String _elem677; // required
    + String _elem677; // optional
                    _elem677 = iprot.readString();
                    struct.part_vals.add(_elem677);
                  }
    @@ -61456,7 +61456,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list688.size);
                        for (int _i689 = 0; _i689 < _list688.size; ++_i689)
                        {
    - String _elem690; // required
    + String _elem690; // optional
                          _elem690 = iprot.readString();
                          struct.part_vals.add(_elem690);
                        }
    @@ -61482,7 +61482,7 @@ public class ThriftHiveMetastore {
                        struct.group_names = new ArrayList<String>(_list691.size);
                        for (int _i692 = 0; _i692 < _list691.size; ++_i692)
                        {
    - String _elem693; // required
    + String _elem693; // optional
                          _elem693 = iprot.readString();
                          struct.group_names.add(_elem693);
                        }
    @@ -61626,7 +61626,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list698.size);
                  for (int _i699 = 0; _i699 < _list698.size; ++_i699)
                  {
    - String _elem700; // required
    + String _elem700; // optional
                    _elem700 = iprot.readString();
                    struct.part_vals.add(_elem700);
                  }
    @@ -61643,7 +61643,7 @@ public class ThriftHiveMetastore {
                  struct.group_names = new ArrayList<String>(_list701.size);
                  for (int _i702 = 0; _i702 < _list701.size; ++_i702)
                  {
    - String _elem703; // required
    + String _elem703; // optional
                    _elem703 = iprot.readString();
                    struct.group_names.add(_elem703);
                  }
    @@ -64418,7 +64418,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list704.size);
                        for (int _i705 = 0; _i705 < _list704.size; ++_i705)
                        {
    - Partition _elem706; // required
    + Partition _elem706; // optional
                          _elem706 = new Partition();
                          _elem706.read(iprot);
                          struct.success.add(_elem706);
    @@ -64538,7 +64538,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list709.size);
                  for (int _i710 = 0; _i710 < _list709.size; ++_i710)
                  {
    - Partition _elem711; // required
    + Partition _elem711; // optional
                    _elem711 = new Partition();
                    _elem711.read(iprot);
                    struct.success.add(_elem711);
    @@ -65238,7 +65238,7 @@ public class ThriftHiveMetastore {
                        struct.group_names = new ArrayList<String>(_list712.size);
                        for (int _i713 = 0; _i713 < _list712.size; ++_i713)
                        {
    - String _elem714; // required
    + String _elem714; // optional
                          _elem714 = iprot.readString();
                          struct.group_names.add(_elem714);
                        }
    @@ -65375,7 +65375,7 @@ public class ThriftHiveMetastore {
                  struct.group_names = new ArrayList<String>(_list717.size);
                  for (int _i718 = 0; _i718 < _list717.size; ++_i718)
                  {
    - String _elem719; // required
    + String _elem719; // optional
                    _elem719 = iprot.readString();
                    struct.group_names.add(_elem719);
                  }
    @@ -65868,7 +65868,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list720.size);
                        for (int _i721 = 0; _i721 < _list720.size; ++_i721)
                        {
    - Partition _elem722; // required
    + Partition _elem722; // optional
                          _elem722 = new Partition();
                          _elem722.read(iprot);
                          struct.success.add(_elem722);
    @@ -65988,7 +65988,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list725.size);
                  for (int _i726 = 0; _i726 < _list725.size; ++_i726)
                  {
    - Partition _elem727; // required
    + Partition _elem727; // optional
                    _elem727 = new Partition();
                    _elem727.read(iprot);
                    struct.success.add(_elem727);
    @@ -67058,7 +67058,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<PartitionSpec>(_list728.size);
                        for (int _i729 = 0; _i729 < _list728.size; ++_i729)
                        {
    - PartitionSpec _elem730; // required
    + PartitionSpec _elem730; // optional
                          _elem730 = new PartitionSpec();
                          _elem730.read(iprot);
                          struct.success.add(_elem730);
    @@ -67178,7 +67178,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<PartitionSpec>(_list733.size);
                  for (int _i734 = 0; _i734 < _list733.size; ++_i734)
                  {
    - PartitionSpec _elem735; // required
    + PartitionSpec _elem735; // optional
                    _elem735 = new PartitionSpec();
                    _elem735.read(iprot);
                    struct.success.add(_elem735);
    @@ -68167,7 +68167,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list736.size);
                        for (int _i737 = 0; _i737 < _list736.size; ++_i737)
                        {
    - String _elem738; // required
    + String _elem738; // optional
                          _elem738 = iprot.readString();
                          struct.success.add(_elem738);
                        }
    @@ -68266,7 +68266,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list741.size);
                  for (int _i742 = 0; _i742 < _list741.size; ++_i742)
                  {
    - String _elem743; // required
    + String _elem743; // optional
                    _elem743 = iprot.readString();
                    struct.success.add(_elem743);
                  }
    @@ -68863,7 +68863,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list744.size);
                        for (int _i745 = 0; _i745 < _list744.size; ++_i745)
                        {
    - String _elem746; // required
    + String _elem746; // optional
                          _elem746 = iprot.readString();
                          struct.part_vals.add(_elem746);
                        }
    @@ -68989,7 +68989,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list749.size);
                  for (int _i750 = 0; _i750 < _list749.size; ++_i750)
                  {
    - String _elem751; // required
    + String _elem751; // optional
                    _elem751 = iprot.readString();
                    struct.part_vals.add(_elem751);
                  }
    @@ -69486,7 +69486,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list752.size);
                        for (int _i753 = 0; _i753 < _list752.size; ++_i753)
                        {
    - Partition _elem754; // required
    + Partition _elem754; // optional
                          _elem754 = new Partition();
                          _elem754.read(iprot);
                          struct.success.add(_elem754);
    @@ -69606,7 +69606,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list757.size);
                  for (int _i758 = 0; _i758 < _list757.size; ++_i758)
                  {
    - Partition _elem759; // required
    + Partition _elem759; // optional
                    _elem759 = new Partition();
                    _elem759.read(iprot);
                    struct.success.add(_elem759);
    @@ -70391,7 +70391,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list760.size);
                        for (int _i761 = 0; _i761 < _list760.size; ++_i761)
                        {
    - String _elem762; // required
    + String _elem762; // optional
                          _elem762 = iprot.readString();
                          struct.part_vals.add(_elem762);
                        }
    @@ -70425,7 +70425,7 @@ public class ThriftHiveMetastore {
                        struct.group_names = new ArrayList<String>(_list763.size);
                        for (int _i764 = 0; _i764 < _list763.size; ++_i764)
                        {
    - String _elem765; // required
    + String _elem765; // optional
                          _elem765 = iprot.readString();
                          struct.group_names.add(_elem765);
                        }
    @@ -70578,7 +70578,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list770.size);
                  for (int _i771 = 0; _i771 < _list770.size; ++_i771)
                  {
    - String _elem772; // required
    + String _elem772; // optional
                    _elem772 = iprot.readString();
                    struct.part_vals.add(_elem772);
                  }
    @@ -70599,7 +70599,7 @@ public class ThriftHiveMetastore {
                  struct.group_names = new ArrayList<String>(_list773.size);
                  for (int _i774 = 0; _i774 < _list773.size; ++_i774)
                  {
    - String _elem775; // required
    + String _elem775; // optional
                    _elem775 = iprot.readString();
                    struct.group_names.add(_elem775);
                  }
    @@ -71092,7 +71092,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list776.size);
                        for (int _i777 = 0; _i777 < _list776.size; ++_i777)
                        {
    - Partition _elem778; // required
    + Partition _elem778; // optional
                          _elem778 = new Partition();
                          _elem778.read(iprot);
                          struct.success.add(_elem778);
    @@ -71212,7 +71212,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list781.size);
                  for (int _i782 = 0; _i782 < _list781.size; ++_i782)
                  {
    - Partition _elem783; // required
    + Partition _elem783; // optional
                    _elem783 = new Partition();
                    _elem783.read(iprot);
                    struct.success.add(_elem783);
    @@ -71815,7 +71815,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list784.size);
                        for (int _i785 = 0; _i785 < _list784.size; ++_i785)
                        {
    - String _elem786; // required
    + String _elem786; // optional
                          _elem786 = iprot.readString();
                          struct.part_vals.add(_elem786);
                        }
    @@ -71941,7 +71941,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list789.size);
                  for (int _i790 = 0; _i790 < _list789.size; ++_i790)
                  {
    - String _elem791; // required
    + String _elem791; // optional
                    _elem791 = iprot.readString();
                    struct.part_vals.add(_elem791);
                  }
    @@ -72438,7 +72438,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list792.size);
                        for (int _i793 = 0; _i793 < _list792.size; ++_i793)
                        {
    - String _elem794; // required
    + String _elem794; // optional
                          _elem794 = iprot.readString();
                          struct.success.add(_elem794);
                        }
    @@ -72557,7 +72557,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list797.size);
                  for (int _i798 = 0; _i798 < _list797.size; ++_i798)
                  {
    - String _elem799; // required
    + String _elem799; // optional
                    _elem799 = iprot.readString();
                    struct.success.add(_elem799);
                  }
    @@ -73730,7 +73730,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list800.size);
                        for (int _i801 = 0; _i801 < _list800.size; ++_i801)
                        {
    - Partition _elem802; // required
    + Partition _elem802; // optional
                          _elem802 = new Partition();
                          _elem802.read(iprot);
                          struct.success.add(_elem802);
    @@ -73850,7 +73850,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list805.size);
                  for (int _i806 = 0; _i806 < _list805.size; ++_i806)
                  {
    - Partition _elem807; // required
    + Partition _elem807; // optional
                    _elem807 = new Partition();
                    _elem807.read(iprot);
                    struct.success.add(_elem807);
    @@ -75024,7 +75024,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<PartitionSpec>(_list808.size);
                        for (int _i809 = 0; _i809 < _list808.size; ++_i809)
                        {
    - PartitionSpec _elem810; // required
    + PartitionSpec _elem810; // optional
                          _elem810 = new PartitionSpec();
                          _elem810.read(iprot);
                          struct.success.add(_elem810);
    @@ -75144,7 +75144,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<PartitionSpec>(_list813.size);
                  for (int _i814 = 0; _i814 < _list813.size; ++_i814)
                  {
    - PartitionSpec _elem815; // required
    + PartitionSpec _elem815; // optional
                    _elem815 = new PartitionSpec();
                    _elem815.read(iprot);
                    struct.success.add(_elem815);
    @@ -76602,7 +76602,7 @@ public class ThriftHiveMetastore {
                        struct.names = new ArrayList<String>(_list816.size);
                        for (int _i817 = 0; _i817 < _list816.size; ++_i817)
                        {
    - String _elem818; // required
    + String _elem818; // optional
                          _elem818 = iprot.readString();
                          struct.names.add(_elem818);
                        }
    @@ -76711,7 +76711,7 @@ public class ThriftHiveMetastore {
                  struct.names = new ArrayList<String>(_list821.size);
                  for (int _i822 = 0; _i822 < _list821.size; ++_i822)
                  {
    - String _elem823; // required
    + String _elem823; // optional
                    _elem823 = iprot.readString();
                    struct.names.add(_elem823);
                  }
    @@ -77204,7 +77204,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Partition>(_list824.size);
                        for (int _i825 = 0; _i825 < _list824.size; ++_i825)
                        {
    - Partition _elem826; // required
    + Partition _elem826; // optional
                          _elem826 = new Partition();
                          _elem826.read(iprot);
                          struct.success.add(_elem826);
    @@ -77324,7 +77324,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Partition>(_list829.size);
                  for (int _i830 = 0; _i830 < _list829.size; ++_i830)
                  {
    - Partition _elem831; // required
    + Partition _elem831; // optional
                    _elem831 = new Partition();
                    _elem831.read(iprot);
                    struct.success.add(_elem831);
    @@ -78881,7 +78881,7 @@ public class ThriftHiveMetastore {
                        struct.new_parts = new ArrayList<Partition>(_list832.size);
                        for (int _i833 = 0; _i833 < _list832.size; ++_i833)
                        {
    - Partition _elem834; // required
    + Partition _elem834; // optional
                          _elem834 = new Partition();
                          _elem834.read(iprot);
                          struct.new_parts.add(_elem834);
    @@ -78991,7 +78991,7 @@ public class ThriftHiveMetastore {
                  struct.new_parts = new ArrayList<Partition>(_list837.size);
                  for (int _i838 = 0; _i838 < _list837.size; ++_i838)
                  {
    - Partition _elem839; // required
    + Partition _elem839; // optional
                    _elem839 = new Partition();
                    _elem839.read(iprot);
                    struct.new_parts.add(_elem839);
    @@ -81197,7 +81197,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list840.size);
                        for (int _i841 = 0; _i841 < _list840.size; ++_i841)
                        {
    - String _elem842; // required
    + String _elem842; // optional
                          _elem842 = iprot.readString();
                          struct.part_vals.add(_elem842);
                        }
    @@ -81326,7 +81326,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list845.size);
                  for (int _i846 = 0; _i846 < _list845.size; ++_i846)
                  {
    - String _elem847; // required
    + String _elem847; // optional
                    _elem847 = iprot.readString();
                    struct.part_vals.add(_elem847);
                  }
    @@ -82209,7 +82209,7 @@ public class ThriftHiveMetastore {
                        struct.part_vals = new ArrayList<String>(_list848.size);
                        for (int _i849 = 0; _i849 < _list848.size; ++_i849)
                        {
    - String _elem850; // required
    + String _elem850; // optional
                          _elem850 = iprot.readString();
                          struct.part_vals.add(_elem850);
                        }
    @@ -82305,7 +82305,7 @@ public class ThriftHiveMetastore {
                  struct.part_vals = new ArrayList<String>(_list853.size);
                  for (int _i854 = 0; _i854 < _list853.size; ++_i854)
                  {
    - String _elem855; // required
    + String _elem855; // optional
                    _elem855 = iprot.readString();
                    struct.part_vals.add(_elem855);
                  }
    @@ -84469,7 +84469,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list856.size);
                        for (int _i857 = 0; _i857 < _list856.size; ++_i857)
                        {
    - String _elem858; // required
    + String _elem858; // optional
                          _elem858 = iprot.readString();
                          struct.success.add(_elem858);
                        }
    @@ -84568,7 +84568,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list861.size);
                  for (int _i862 = 0; _i862 < _list861.size; ++_i862)
                  {
    - String _elem863; // required
    + String _elem863; // optional
                    _elem863 = iprot.readString();
                    struct.success.add(_elem863);
                  }
    @@ -94564,7 +94564,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Index>(_list894.size);
                        for (int _i895 = 0; _i895 < _list894.size; ++_i895)
                        {
    - Index _elem896; // required
    + Index _elem896; // optional
                          _elem896 = new Index();
                          _elem896.read(iprot);
                          struct.success.add(_elem896);
    @@ -94684,7 +94684,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Index>(_list899.size);
                  for (int _i900 = 0; _i900 < _list899.size; ++_i900)
                  {
    - Index _elem901; // required
    + Index _elem901; // optional
                    _elem901 = new Index();
                    _elem901.read(iprot);
                    struct.success.add(_elem901);
    @@ -95673,7 +95673,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list902.size);
                        for (int _i903 = 0; _i903 < _list902.size; ++_i903)
                        {
    - String _elem904; // required
    + String _elem904; // optional
                          _elem904 = iprot.readString();
                          struct.success.add(_elem904);
                        }
    @@ -95772,7 +95772,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list907.size);
                  for (int _i908 = 0; _i908 < _list907.size; ++_i908)
                  {
    - String _elem909; // required
    + String _elem909; // optional
                    _elem909 = iprot.readString();
                    struct.success.add(_elem909);
                  }
    @@ -111516,7 +111516,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list910.size);
                        for (int _i911 = 0; _i911 < _list910.size; ++_i911)
                        {
    - String _elem912; // required
    + String _elem912; // optional
                          _elem912 = iprot.readString();
                          struct.success.add(_elem912);
                        }
    @@ -111615,7 +111615,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list915.size);
                  for (int _i916 = 0; _i916 < _list915.size; ++_i916)
                  {
    - String _elem917; // required
    + String _elem917; // optional
                    _elem917 = iprot.readString();
                    struct.success.add(_elem917);
                  }
    @@ -114964,7 +114964,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list918.size);
                        for (int _i919 = 0; _i919 < _list918.size; ++_i919)
                        {
    - String _elem920; // required
    + String _elem920; // optional
                          _elem920 = iprot.readString();
                          struct.success.add(_elem920);
                        }
    @@ -115063,7 +115063,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list923.size);
                  for (int _i924 = 0; _i924 < _list923.size; ++_i924)
                  {
    - String _elem925; // required
    + String _elem925; // optional
                    _elem925 = iprot.readString();
                    struct.success.add(_elem925);
                  }
    @@ -118360,7 +118360,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<Role>(_list926.size);
                        for (int _i927 = 0; _i927 < _list926.size; ++_i927)
                        {
    - Role _elem928; // required
    + Role _elem928; // optional
                          _elem928 = new Role();
                          _elem928.read(iprot);
                          struct.success.add(_elem928);
    @@ -118460,7 +118460,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<Role>(_list931.size);
                  for (int _i932 = 0; _i932 < _list931.size; ++_i932)
                  {
    - Role _elem933; // required
    + Role _elem933; // optional
                    _elem933 = new Role();
                    _elem933.read(iprot);
                    struct.success.add(_elem933);
    @@ -121475,7 +121475,7 @@ public class ThriftHiveMetastore {
                        struct.group_names = new ArrayList<String>(_list934.size);
                        for (int _i935 = 0; _i935 < _list934.size; ++_i935)
                        {
    - String _elem936; // required
    + String _elem936; // optional
                          _elem936 = iprot.readString();
                          struct.group_names.add(_elem936);
                        }
    @@ -121585,7 +121585,7 @@ public class ThriftHiveMetastore {
                  struct.group_names = new ArrayList<String>(_list939.size);
                  for (int _i940 = 0; _i940 < _list939.size; ++_i940)
                  {
    - String _elem941; // required
    + String _elem941; // optional
                    _elem941 = iprot.readString();
                    struct.group_names.add(_elem941);
                  }
    @@ -123049,7 +123049,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<HiveObjectPrivilege>(_list942.size);
                        for (int _i943 = 0; _i943 < _list942.size; ++_i943)
                        {
    - HiveObjectPrivilege _elem944; // required
    + HiveObjectPrivilege _elem944; // optional
                          _elem944 = new HiveObjectPrivilege();
                          _elem944.read(iprot);
                          struct.success.add(_elem944);
    @@ -123149,7 +123149,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<HiveObjectPrivilege>(_list947.size);
                  for (int _i948 = 0; _i948 < _list947.size; ++_i948)
                  {
    - HiveObjectPrivilege _elem949; // required
    + HiveObjectPrivilege _elem949; // optional
                    _elem949 = new HiveObjectPrivilege();
                    _elem949.read(iprot);
                    struct.success.add(_elem949);
    @@ -126061,7 +126061,7 @@ public class ThriftHiveMetastore {
                        struct.group_names = new ArrayList<String>(_list950.size);
                        for (int _i951 = 0; _i951 < _list950.size; ++_i951)
                        {
    - String _elem952; // required
    + String _elem952; // optional
                          _elem952 = iprot.readString();
                          struct.group_names.add(_elem952);
                        }
    @@ -126155,7 +126155,7 @@ public class ThriftHiveMetastore {
                  struct.group_names = new ArrayList<String>(_list955.size);
                  for (int _i956 = 0; _i956 < _list955.size; ++_i956)
                  {
    - String _elem957; // required
    + String _elem957; // optional
                    _elem957 = iprot.readString();
                    struct.group_names.add(_elem957);
                  }
    @@ -126567,7 +126567,7 @@ public class ThriftHiveMetastore {
                        struct.success = new ArrayList<String>(_list958.size);
                        for (int _i959 = 0; _i959 < _list958.size; ++_i959)
                        {
    - String _elem960; // required
    + String _elem960; // optional
                          _elem960 = iprot.readString();
                          struct.success.add(_elem960);
                        }
    @@ -126666,7 +126666,7 @@ public class ThriftHiveMetastore {
                  struct.success = new ArrayList<String>(_list963.size);
                  for (int _i964 = 0; _i964 < _list963.size; ++_i964)
                  {
    - String _elem965; // required
    + String _elem965; // optional
                    _elem965 = iprot.readString();
                    struct.success.add(_elem965);
                  }

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java Tue Apr 14 20:47:29 2015
    @@ -618,7 +618,7 @@ public class Type implements org.apache.
                      struct.fields = new ArrayList<FieldSchema>(_list0.size);
                      for (int _i1 = 0; _i1 < _list0.size; ++_i1)
                      {
    - FieldSchema _elem2; // required
    + FieldSchema _elem2; // optional
                        _elem2 = new FieldSchema();
                        _elem2.read(iprot);
                        struct.fields.add(_elem2);
    @@ -749,7 +749,7 @@ public class Type implements org.apache.
                struct.fields = new ArrayList<FieldSchema>(_list5.size);
                for (int _i6 = 0; _i6 < _list5.size; ++_i6)
                {
    - FieldSchema _elem7; // required
    + FieldSchema _elem7; // optional
                  _elem7 = new FieldSchema();
                  _elem7.read(iprot);
                  struct.fields.add(_elem7);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-php/metastore/Types.php Tue Apr 14 20:47:29 2015
    @@ -6074,6 +6074,220 @@ class DecimalColumnStatsData {

      }

    +class Date {
    + static $_TSPEC;
    +
    + public $daysSinceEpoch = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'daysSinceEpoch',
    + 'type' => TType::I64,
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['daysSinceEpoch'])) {
    + $this->daysSinceEpoch = $vals['daysSinceEpoch'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'Date';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::I64) {
    + $xfer += $input->readI64($this->daysSinceEpoch);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('Date');
    + if ($this->daysSinceEpoch !== null) {
    + $xfer += $output->writeFieldBegin('daysSinceEpoch', TType::I64, 1);
    + $xfer += $output->writeI64($this->daysSinceEpoch);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class DateColumnStatsData {
    + static $_TSPEC;
    +
    + public $lowValue = null;
    + public $highValue = null;
    + public $numNulls = null;
    + public $numDVs = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'lowValue',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\Date',
    + ),
    + 2 => array(
    + 'var' => 'highValue',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\Date',
    + ),
    + 3 => array(
    + 'var' => 'numNulls',
    + 'type' => TType::I64,
    + ),
    + 4 => array(
    + 'var' => 'numDVs',
    + 'type' => TType::I64,
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['lowValue'])) {
    + $this->lowValue = $vals['lowValue'];
    + }
    + if (isset($vals['highValue'])) {
    + $this->highValue = $vals['highValue'];
    + }
    + if (isset($vals['numNulls'])) {
    + $this->numNulls = $vals['numNulls'];
    + }
    + if (isset($vals['numDVs'])) {
    + $this->numDVs = $vals['numDVs'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'DateColumnStatsData';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->lowValue = new \metastore\Date();
    + $xfer += $this->lowValue->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->highValue = new \metastore\Date();
    + $xfer += $this->highValue->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::I64) {
    + $xfer += $input->readI64($this->numNulls);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::I64) {
    + $xfer += $input->readI64($this->numDVs);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('DateColumnStatsData');
    + if ($this->lowValue !== null) {
    + if (!is_object($this->lowValue)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('lowValue', TType::STRUCT, 1);
    + $xfer += $this->lowValue->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->highValue !== null) {
    + if (!is_object($this->highValue)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('highValue', TType::STRUCT, 2);
    + $xfer += $this->highValue->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->numNulls !== null) {
    + $xfer += $output->writeFieldBegin('numNulls', TType::I64, 3);
    + $xfer += $output->writeI64($this->numNulls);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->numDVs !== null) {
    + $xfer += $output->writeFieldBegin('numDVs', TType::I64, 4);
    + $xfer += $output->writeI64($this->numDVs);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
      class ColumnStatisticsData {
        static $_TSPEC;

    @@ -6083,6 +6297,7 @@ class ColumnStatisticsData {
        public $stringStats = null;
        public $binaryStats = null;
        public $decimalStats = null;
    + public $dateStats = null;

        public function __construct($vals=null) {
          if (!isset(self::$_TSPEC)) {
    @@ -6117,6 +6332,11 @@ class ColumnStatisticsData {
                'type' => TType::STRUCT,
                'class' => '\metastore\DecimalColumnStatsData',
                ),
    + 7 => array(
    + 'var' => 'dateStats',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\DateColumnStatsData',
    + ),
              );
          }
          if (is_array($vals)) {
    @@ -6138,6 +6358,9 @@ class ColumnStatisticsData {
            if (isset($vals['decimalStats'])) {
              $this->decimalStats = $vals['decimalStats'];
            }
    + if (isset($vals['dateStats'])) {
    + $this->dateStats = $vals['dateStats'];
    + }
          }
        }

    @@ -6208,6 +6431,14 @@ class ColumnStatisticsData {
                  $xfer += $input->skip($ftype);
                }
                break;
    + case 7:
    + if ($ftype == TType::STRUCT) {
    + $this->dateStats = new \metastore\DateColumnStatsData();
    + $xfer += $this->dateStats->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
              default:
                $xfer += $input->skip($ftype);
                break;
    @@ -6269,6 +6500,14 @@ class ColumnStatisticsData {
            $xfer += $this->decimalStats->write($output);
            $xfer += $output->writeFieldEnd();
          }
    + if ($this->dateStats !== null) {
    + if (!is_object($this->dateStats)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('dateStats', TType::STRUCT, 7);
    + $xfer += $this->dateStats->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
          $xfer += $output->writeFieldStop();
          $xfer += $output->writeStructEnd();
          return $xfer;

    Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py Tue Apr 14 20:47:29 2015
    @@ -4148,6 +4148,170 @@ class DecimalColumnStatsData:
        def __ne__(self, other):
          return not (self == other)

    +class Date:
    + """
    + Attributes:
    + - daysSinceEpoch
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.I64, 'daysSinceEpoch', None, None, ), # 1
    + )
    +
    + def __init__(self, daysSinceEpoch=None,):
    + self.daysSinceEpoch = daysSinceEpoch
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.I64:
    + self.daysSinceEpoch = iprot.readI64();
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('Date')
    + if self.daysSinceEpoch is not None:
    + oprot.writeFieldBegin('daysSinceEpoch', TType.I64, 1)
    + oprot.writeI64(self.daysSinceEpoch)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + if self.daysSinceEpoch is None:
    + raise TProtocol.TProtocolException(message='Required field daysSinceEpoch is unset!')
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class DateColumnStatsData:
    + """
    + Attributes:
    + - lowValue
    + - highValue
    + - numNulls
    + - numDVs
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRUCT, 'lowValue', (Date, Date.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'highValue', (Date, Date.thrift_spec), None, ), # 2
    + (3, TType.I64, 'numNulls', None, None, ), # 3
    + (4, TType.I64, 'numDVs', None, None, ), # 4
    + )
    +
    + def __init__(self, lowValue=None, highValue=None, numNulls=None, numDVs=None,):
    + self.lowValue = lowValue
    + self.highValue = highValue
    + self.numNulls = numNulls
    + self.numDVs = numDVs
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRUCT:
    + self.lowValue = Date()
    + self.lowValue.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.highValue = Date()
    + self.highValue.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.I64:
    + self.numNulls = iprot.readI64();
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.I64:
    + self.numDVs = iprot.readI64();
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('DateColumnStatsData')
    + if self.lowValue is not None:
    + oprot.writeFieldBegin('lowValue', TType.STRUCT, 1)
    + self.lowValue.write(oprot)
    + oprot.writeFieldEnd()
    + if self.highValue is not None:
    + oprot.writeFieldBegin('highValue', TType.STRUCT, 2)
    + self.highValue.write(oprot)
    + oprot.writeFieldEnd()
    + if self.numNulls is not None:
    + oprot.writeFieldBegin('numNulls', TType.I64, 3)
    + oprot.writeI64(self.numNulls)
    + oprot.writeFieldEnd()
    + if self.numDVs is not None:
    + oprot.writeFieldBegin('numDVs', TType.I64, 4)
    + oprot.writeI64(self.numDVs)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + if self.numNulls is None:
    + raise TProtocol.TProtocolException(message='Required field numNulls is unset!')
    + if self.numDVs is None:
    + raise TProtocol.TProtocolException(message='Required field numDVs is unset!')
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
      class ColumnStatisticsData:
        """
        Attributes:
    @@ -4157,6 +4321,7 @@ class ColumnStatisticsData:
         - stringStats
         - binaryStats
         - decimalStats
    + - dateStats
        """

        thrift_spec = (
    @@ -4167,15 +4332,17 @@ class ColumnStatisticsData:
          (4, TType.STRUCT, 'stringStats', (StringColumnStatsData, StringColumnStatsData.thrift_spec), None, ), # 4
          (5, TType.STRUCT, 'binaryStats', (BinaryColumnStatsData, BinaryColumnStatsData.thrift_spec), None, ), # 5
          (6, TType.STRUCT, 'decimalStats', (DecimalColumnStatsData, DecimalColumnStatsData.thrift_spec), None, ), # 6
    + (7, TType.STRUCT, 'dateStats', (DateColumnStatsData, DateColumnStatsData.thrift_spec), None, ), # 7
        )

    - def __init__(self, booleanStats=None, longStats=None, doubleStats=None, stringStats=None, binaryStats=None, decimalStats=None,):
    + def __init__(self, booleanStats=None, longStats=None, doubleStats=None, stringStats=None, binaryStats=None, decimalStats=None, dateStats=None,):
          self.booleanStats = booleanStats
          self.longStats = longStats
          self.doubleStats = doubleStats
          self.stringStats = stringStats
          self.binaryStats = binaryStats
          self.decimalStats = decimalStats
    + self.dateStats = dateStats

        def read(self, iprot):
          if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    @@ -4222,6 +4389,12 @@ class ColumnStatisticsData:
                self.decimalStats.read(iprot)
              else:
                iprot.skip(ftype)
    + elif fid == 7:
    + if ftype == TType.STRUCT:
    + self.dateStats = DateColumnStatsData()
    + self.dateStats.read(iprot)
    + else:
    + iprot.skip(ftype)
            else:
              iprot.skip(ftype)
            iprot.readFieldEnd()
    @@ -4256,6 +4429,10 @@ class ColumnStatisticsData:
            oprot.writeFieldBegin('decimalStats', TType.STRUCT, 6)
            self.decimalStats.write(oprot)
            oprot.writeFieldEnd()
    + if self.dateStats is not None:
    + oprot.writeFieldBegin('dateStats', TType.STRUCT, 7)
    + self.dateStats.write(oprot)
    + oprot.writeFieldEnd()
          oprot.writeFieldStop()
          oprot.writeStructEnd()


    Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb Tue Apr 14 20:47:29 2015
    @@ -977,6 +977,47 @@ class DecimalColumnStatsData
        ::Thrift::Struct.generate_accessors self
      end

    +class Date
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DAYSSINCEEPOCH = 1
    +
    + FIELDS = {
    + DAYSSINCEEPOCH => {:type => ::Thrift::Types::I64, :name => 'daysSinceEpoch'}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field daysSinceEpoch is unset!') unless @daysSinceEpoch
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    +end
    +
    +class DateColumnStatsData
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + LOWVALUE = 1
    + HIGHVALUE = 2
    + NUMNULLS = 3
    + NUMDVS = 4
    +
    + FIELDS = {
    + LOWVALUE => {:type => ::Thrift::Types::STRUCT, :name => 'lowValue', :class => ::Date, :optional => true},
    + HIGHVALUE => {:type => ::Thrift::Types::STRUCT, :name => 'highValue', :class => ::Date, :optional => true},
    + NUMNULLS => {:type => ::Thrift::Types::I64, :name => 'numNulls'},
    + NUMDVS => {:type => ::Thrift::Types::I64, :name => 'numDVs'}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field numNulls is unset!') unless @numNulls
    + raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field numDVs is unset!') unless @numDVs
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    +end
    +
      class ColumnStatisticsData < ::Thrift::Union
        include ::Thrift::Struct_Union
        class << self
    @@ -1003,6 +1044,10 @@ class ColumnStatisticsData < ::Thrift::U
          def decimalStats(val)
            ColumnStatisticsData.new(:decimalStats, val)
          end
    +
    + def dateStats(val)
    + ColumnStatisticsData.new(:dateStats, val)
    + end
        end

        BOOLEANSTATS = 1
    @@ -1011,6 +1056,7 @@ class ColumnStatisticsData < ::Thrift::U
        STRINGSTATS = 4
        BINARYSTATS = 5
        DECIMALSTATS = 6
    + DATESTATS = 7

        FIELDS = {
          BOOLEANSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'booleanStats', :class => ::BooleanColumnStatsData},
    @@ -1018,7 +1064,8 @@ class ColumnStatisticsData < ::Thrift::U
          DOUBLESTATS => {:type => ::Thrift::Types::STRUCT, :name => 'doubleStats', :class => ::DoubleColumnStatsData},
          STRINGSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'stringStats', :class => ::StringColumnStatsData},
          BINARYSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'binaryStats', :class => ::BinaryColumnStatsData},
    - DECIMALSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'decimalStats', :class => ::DecimalColumnStatsData}
    + DECIMALSTATS => {:type => ::Thrift::Types::STRUCT, :name => 'decimalStats', :class => ::DecimalColumnStatsData},
    + DATESTATS => {:type => ::Thrift::Types::STRUCT, :name => 'dateStats', :class => ::DateColumnStatsData}
        }

        def struct_fields; FIELDS; end

    Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java (original)
    +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/StatObjectConverter.java Tue Apr 14 20:47:29 2015
    @@ -27,6 +27,8 @@ import org.apache.hadoop.hive.metastore.
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
    +import org.apache.hadoop.hive.metastore.api.Date;
    +import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.Decimal;
      import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
    @@ -103,6 +105,13 @@ public class StatObjectConverter {
                 binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
                 binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
                 binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
    + } else if (statsObj.getStatsData().isSetDateStats()) {
    + DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
    + mColStats.setDateStats(
    + dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
    + dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
    + dateStats.isSetLowValue() ? dateStats.getLowValue().getDaysSinceEpoch() : null,
    + dateStats.isSetHighValue() ? dateStats.getHighValue().getDaysSinceEpoch() : null);
           }
           return mColStats;
        }
    @@ -258,6 +267,19 @@ public class StatObjectConverter {
            }
            decimalStats.setNumDVs(mStatsObj.getNumDVs());
            colStatsData.setDecimalStats(decimalStats);
    + } else if (colType.equals("date")) {
    + DateColumnStatsData dateStats = new DateColumnStatsData();
    + dateStats.setNumNulls(mStatsObj.getNumNulls());
    + Long highValue = mStatsObj.getLongHighValue();
    + if (highValue != null) {
    + dateStats.setHighValue(new Date(highValue));
    + }
    + Long lowValue = mStatsObj.getLongLowValue();
    + if (lowValue != null) {
    + dateStats.setLowValue(new Date(lowValue));
    + }
    + dateStats.setNumDVs(mStatsObj.getNumDVs());
    + colStatsData.setDateStats(dateStats);
          }
          statsObj.setStatsData(colStatsData);
          return statsObj;
    @@ -330,6 +352,13 @@ public class StatObjectConverter {
                binaryStats.isSetNumNulls() ? binaryStats.getNumNulls() : null,
                binaryStats.isSetMaxColLen() ? binaryStats.getMaxColLen() : null,
                binaryStats.isSetAvgColLen() ? binaryStats.getAvgColLen() : null);
    + } else if (statsObj.getStatsData().isSetDateStats()) {
    + DateColumnStatsData dateStats = statsObj.getStatsData().getDateStats();
    + mColStats.setDateStats(
    + dateStats.isSetNumNulls() ? dateStats.getNumNulls() : null,
    + dateStats.isSetNumDVs() ? dateStats.getNumDVs() : null,
    + dateStats.isSetLowValue() ? dateStats.getLowValue().getDaysSinceEpoch() : null,
    + dateStats.isSetHighValue() ? dateStats.getHighValue().getDaysSinceEpoch() : null);
          }
          return mColStats;
        }
    @@ -397,6 +426,13 @@ public class StatObjectConverter {
            }
            decimalStats.setNumDVs(mStatsObj.getNumDVs());
            colStatsData.setDecimalStats(decimalStats);
    + } else if (colType.equals("date")) {
    + DateColumnStatsData dateStats = new DateColumnStatsData();
    + dateStats.setNumNulls(mStatsObj.getNumNulls());
    + dateStats.setHighValue(new Date(mStatsObj.getLongHighValue()));
    + dateStats.setLowValue(new Date(mStatsObj.getLongLowValue()));
    + dateStats.setNumDVs(mStatsObj.getNumDVs());
    + colStatsData.setDateStats(dateStats);
          }
          statsObj.setStatsData(colStatsData);
          return statsObj;
    @@ -473,6 +509,17 @@ public class StatObjectConverter {
            }
            decimalStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
            data.setDecimalStats(decimalStats);
    + } else if (colType.equals("date")) {
    + DateColumnStatsData dateStats = new DateColumnStatsData();
    + dateStats.setNumNulls(MetaStoreDirectSql.extractSqlLong(nulls));
    + if (lhigh != null) {
    + dateStats.setHighValue(new Date(MetaStoreDirectSql.extractSqlLong(lhigh)));
    + }
    + if (llow != null) {
    + dateStats.setLowValue(new Date(MetaStoreDirectSql.extractSqlLong(llow)));
    + }
    + dateStats.setNumDVs(MetaStoreDirectSql.extractSqlLong(dist));
    + data.setDateStats(dateStats);
          }
        }


    Modified: hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java (original)
    +++ hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MPartitionColumnStatistics.java Tue Apr 14 20:47:29 2015
    @@ -200,6 +200,14 @@ public class MPartitionColumnStatistics
          this.maxColLen = maxColLen;
          this.avgColLen = avgColLen;
        }
    +
    + public void setDateStats(Long numNulls, Long numNDVs, Long lowValue, Long highValue) {
    + this.numNulls = numNulls;
    + this.numDVs = numNDVs;
    + this.longLowValue = lowValue;
    + this.longHighValue = highValue;
    + }
    +
        public Long getLongLowValue() {
          return longLowValue;
        }

    Modified: hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java (original)
    +++ hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTableColumnStatistics.java Tue Apr 14 20:47:29 2015
    @@ -191,6 +191,13 @@ public class MTableColumnStatistics {
          this.avgColLen = avgColLen;
        }

    + public void setDateStats(Long numNulls, Long numNDVs, Long lowValue, Long highValue) {
    + this.numNulls = numNulls;
    + this.numDVs = numNDVs;
    + this.longLowValue = lowValue;
    + this.longHighValue = highValue;
    + }
    +
        public Long getLongLowValue() {
          return longLowValue;
        }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java Tue Apr 14 20:47:29 2015
    @@ -36,6 +36,8 @@ import org.apache.hadoop.hive.metastore.
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
    +import org.apache.hadoop.hive.metastore.api.Date;
    +import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.Decimal;
      import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
    @@ -51,11 +53,13 @@ import org.apache.hadoop.hive.ql.metadat
      import org.apache.hadoop.hive.ql.plan.ColumnStatsWork;
      import org.apache.hadoop.hive.ql.plan.api.StageType;
      import org.apache.hadoop.hive.ql.session.SessionState;
    +import org.apache.hadoop.hive.serde2.io.DateWritable;
      import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
      import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.StructField;
      import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    +import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
    @@ -187,6 +191,23 @@ public class ColumnStatsTask extends Tas
          }
        }

    + private void unpackDateStats(ObjectInspector oi, Object o, String fName,
    + ColumnStatisticsObj statsObj) {
    + if (fName.equals("countnulls")) {
    + long v = ((LongObjectInspector) oi).get(o);
    + statsObj.getStatsData().getDateStats().setNumNulls(v);
    + } else if (fName.equals("numdistinctvalues")) {
    + long v = ((LongObjectInspector) oi).get(o);
    + statsObj.getStatsData().getDateStats().setNumDVs(v);
    + } else if (fName.equals("max")) {
    + DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
    + statsObj.getStatsData().getDateStats().setHighValue(new Date(v.getDays()));
    + } else if (fName.equals("min")) {
    + DateWritable v = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
    + statsObj.getStatsData().getDateStats().setLowValue(new Date(v.getDays()));
    + }
    + }
    +
        private void unpackPrimitiveObject (ObjectInspector oi, Object o, String fieldName,
            ColumnStatisticsObj statsObj) {
          if (o == null) {
    @@ -222,6 +243,10 @@ public class ColumnStatsTask extends Tas
              DecimalColumnStatsData decimalStats = new DecimalColumnStatsData();
              statsData.setDecimalStats(decimalStats);
              statsObj.setStatsData(statsData);
    + } else if (s.equalsIgnoreCase("date")) {
    + DateColumnStatsData dateStats = new DateColumnStatsData();
    + statsData.setDateStats(dateStats);
    + statsObj.setStatsData(statsData);
            }
          } else {
            // invoke the right unpack method depending on data type of the column
    @@ -237,6 +262,8 @@ public class ColumnStatsTask extends Tas
              unpackBinaryStats(oi, o, fieldName, statsObj);
            } else if (statsObj.getStatsData().isSetDecimalStats()) {
              unpackDecimalStats(oi, o, fieldName, statsObj);
    + } else if (statsObj.getStatsData().isSetDateStats()) {
    + unpackDateStats(oi, o, fieldName, statsObj);
            }
          }
        }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsUpdateTask.java Tue Apr 14 20:47:29 2015
    @@ -36,6 +36,8 @@ import org.apache.hadoop.hive.metastore.
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
    +import org.apache.hadoop.hive.metastore.api.Date;
    +import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.Decimal;
      import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
    @@ -50,6 +52,7 @@ import org.apache.hadoop.hive.ql.plan.Co
      import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
      import org.apache.hadoop.hive.ql.plan.api.StageType;
      import org.apache.hadoop.hive.ql.session.SessionState;
    +import org.apache.hadoop.hive.serde2.io.DateWritable;

      /**
       * ColumnStatsUpdateTask implementation. For example, ALTER TABLE src_stat
    @@ -235,6 +238,28 @@ public class ColumnStatsUpdateTask exten
            }
            statsData.setDecimalStats(decimalStats);
            statsObj.setStatsData(statsData);
    + } else if (columnType.equalsIgnoreCase("date")) {
    + DateColumnStatsData dateStats = new DateColumnStatsData();
    + Map<String, String> mapProp = work.getMapProp();
    + for (Entry<String, String> entry : mapProp.entrySet()) {
    + String fName = entry.getKey();
    + String value = entry.getValue();
    + if (fName.equals("numNulls")) {
    + dateStats.setNumNulls(Long.parseLong(value));
    + } else if (fName.equals("numDVs")) {
    + dateStats.setNumDVs(Long.parseLong(value));
    + } else if (fName.equals("lowValue")) {
    + // Date high/low value is stored as long in stats DB, but allow users to set high/low
    + // value using either date format (yyyy-mm-dd) or numeric format (days since epoch)
    + dateStats.setLowValue(readDateValue(value));
    + } else if (fName.equals("highValue")) {
    + dateStats.setHighValue(readDateValue(value));
    + } else {
    + throw new SemanticException("Unknown stat");
    + }
    + }
    + statsData.setDateStats(dateStats);
    + statsObj.setStatsData(statsData);
          } else {
            throw new SemanticException("Unsupported type");
          }
    @@ -302,4 +327,16 @@ public class ColumnStatsUpdateTask exten
        public String getName() {
          return "COLUMNSTATS UPDATE TASK";
        }
    +
    + private Date readDateValue(String dateStr) {
    + // try either yyyy-mm-dd, or integer representing days since epoch
    + try {
    + DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr));
    + return new Date(writableVal.getDays());
    + } catch (IllegalArgumentException err) {
    + // Fallback to integer parsing
    + LOG.debug("Reading date value as days since epoch: " + dateStr);
    + return new Date(Long.parseLong(dateStr));
    + }
    + }
      }

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java Tue Apr 14 20:47:29 2015
    @@ -37,6 +37,7 @@ import org.apache.hadoop.hive.metastore.
      import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
      import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
    +import org.apache.hadoop.hive.metastore.api.DateColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.Decimal;
      import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
      import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
    @@ -52,6 +53,7 @@ import org.apache.hadoop.hive.ql.metadat
      import org.apache.hadoop.hive.ql.plan.DescTableDesc;
      import org.apache.hadoop.hive.ql.plan.PlanUtils;
      import org.apache.hadoop.hive.ql.plan.ShowIndexesDesc;
    +import org.apache.hadoop.hive.serde2.io.DateWritable;


      /**
    @@ -145,9 +147,21 @@ public final class MetaDataFormatUtils {
        }

        private static String convertToString(Decimal val) {
    + if (val == null) {
    + return "";
    + }
          return HiveDecimal.create(new BigInteger(val.getUnscaled()), val.getScale()).toString();
        }

    + private static String convertToString(org.apache.hadoop.hive.metastore.api.Date val) {
    + if (val == null) {
    + return "";
    + }
    +
    + DateWritable writableValue = new DateWritable((int) val.getDaysSinceEpoch());
    + return writableValue.toString();
    + }
    +
        private static ColumnStatisticsObj getColumnStatisticsObject(String colName,
            String colType, List<ColumnStatisticsObj> colStats) {
          if (colStats != null && !colStats.isEmpty()) {
    @@ -196,6 +210,12 @@ public final class MetaDataFormatUtils {
                LongColumnStatsData lcsd = csd.getLongStats();
                appendColumnStatsNoFormatting(colBuffer, lcsd.getLowValue(), lcsd.getHighValue(),
                    lcsd.getNumNulls(), lcsd.getNumDVs(), "", "", "", "");
    + } else if (csd.isSetDateStats()) {
    + DateColumnStatsData dcsd = csd.getDateStats();
    + appendColumnStatsNoFormatting(colBuffer,
    + convertToString(dcsd.getLowValue()),
    + convertToString(dcsd.getHighValue()),
    + dcsd.getNumNulls(), dcsd.getNumDVs(), "", "", "", "");
              }
            } else {
              appendColumnStatsNoFormatting(colBuffer, "", "", "", "", "", "", "", "");
    @@ -440,6 +460,12 @@ public final class MetaDataFormatUtils {
                LongColumnStatsData lcsd = csd.getLongStats();
                appendColumnStats(tableInfo, lcsd.getLowValue(), lcsd.getHighValue(), lcsd.getNumNulls(),
                    lcsd.getNumDVs(), "", "", "", "");
    + } else if (csd.isSetDateStats()) {
    + DateColumnStatsData dcsd = csd.getDateStats();
    + appendColumnStats(tableInfo,
    + convertToString(dcsd.getLowValue()),
    + convertToString(dcsd.getHighValue()),
    + dcsd.getNumNulls(), dcsd.getNumDVs(), "", "", "", "");
              }
            } else {
              appendColumnStats(tableInfo, "", "", "", "", "", "", "", "");

    Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java?rev=1673553&r1=1673552&r2=1673553&view=diff
    ==============================================================================
    --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (original)
    +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java Tue Apr 14 20:47:29 2015
    @@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UD
      import org.apache.hadoop.hive.ql.metadata.HiveException;
      import org.apache.hadoop.hive.ql.parse.SemanticException;
      import org.apache.hadoop.hive.ql.util.JavaDataModel;
    +import org.apache.hadoop.hive.serde2.io.DateWritable;
      import org.apache.hadoop.hive.serde2.io.DoubleWritable;
      import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
      import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    @@ -86,9 +87,11 @@ public class GenericUDAFComputeStats ext
            return new GenericUDAFBinaryStatsEvaluator();
          case DECIMAL:
            return new GenericUDAFDecimalStatsEvaluator();
    + case DATE:
    + return new GenericUDAFDateStatsEvaluator();
          default:
            throw new UDFArgumentTypeException(0,
    - "Only integer/long/timestamp/float/double/string/binary/boolean/decimal type argument " +
    + "Only integer/long/timestamp/date/float/double/string/binary/boolean/decimal type argument " +
                "is accepted but "
                + parameters[0].getTypeName() + " is passed.");
          }
    @@ -1314,4 +1317,73 @@ public class GenericUDAFComputeStats ext
            ((NumericStatsAgg)agg).reset("Decimal");
          }
        }
    +
    + /**
    + * GenericUDAFDateStatsEvaluator
    + * High/low value will be saved in stats DB as long value representing days since epoch.
    + */
    + public static class GenericUDAFDateStatsEvaluator
    + extends GenericUDAFNumericStatsEvaluator<DateWritable, DateObjectInspector> {
    +
    + @Override
    + protected DateObjectInspector getValueObjectInspector() {
    + return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    + }
    +
    + @AggregationType(estimable = true)
    + public class DateStatsAgg extends NumericStatsAgg {
    + @Override
    + public int estimate() {
    + JavaDataModel model = JavaDataModel.get();
    + return super.estimate() + model.primitive2() * 2;
    + }
    +
    + @Override
    + protected void update(Object p, PrimitiveObjectInspector inputOI) {
    + // DateWritable is mutable, DateStatsAgg needs its own copy
    + DateWritable v = new DateWritable((DateWritable) inputOI.getPrimitiveWritableObject(p));
    +
    + //Update min counter if new value is less than min seen so far
    + if (min == null || v.compareTo(min) < 0) {
    + min = v;
    + }
    + //Update max counter if new value is greater than max seen so far
    + if (max == null || v.compareTo(max) > 0) {
    + max = v;
    + }
    + // Add value to NumDistinctValue Estimator
    + numDV.addToEstimator(v.getDays());
    + }
    +
    + @Override
    + protected void updateMin(Object minValue, DateObjectInspector minFieldOI) {
    + if ((minValue != null) && (min == null ||
    + min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) {
    + // DateWritable is mutable, DateStatsAgg needs its own copy
    + min = new DateWritable(minFieldOI.getPrimitiveWritableObject(minValue));
    + }
    + }
    +
    + @Override
    + protected void updateMax(Object maxValue, DateObjectInspector maxFieldOI) {
    + if ((maxValue != null) && (max == null ||
    + max.compareTo(maxFieldOI.getPrimitiveWritableObject(maxValue)) < 0)) {
    + // DateWritable is mutable, DateStatsAgg needs its own copy
    + max = new DateWritable(maxFieldOI.getPrimitiveWritableObject(maxValue));
    + }
    + }
    + };
    +
    + @Override
    + public AggregationBuffer getNewAggregationBuffer() throws HiveException {
    + AggregationBuffer result = new DateStatsAgg();
    + reset(result);
    + return result;
    + }
    +
    + @Override
    + public void reset(AggregationBuffer agg) throws HiveException {
    + ((NumericStatsAgg)agg).reset("Date");
    + }
    + }
      }

    Added: hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q?rev=1673553&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q (added)
    +++ hive/trunk/ql/src/test/queries/clientpositive/compute_stats_date.q Tue Apr 14 20:47:29 2015
    @@ -0,0 +1,28 @@
    +
    +create table tab_date (
    + origin_city_name string,
    + dest_city_name string,
    + fl_date date,
    + arr_delay float,
    + fl_num int
    +);
    +
    +-- insert some data
    +load data local inpath '../../data/files/flights_join.txt' overwrite into table tab_date;
    +
    +select count(*) from tab_date;
    +
    +-- compute statistical summary of data
    +select compute_stats(fl_date, 16) from tab_date;
    +
    +explain
    +analyze table tab_date compute statistics for columns fl_date;
    +
    +analyze table tab_date compute statistics for columns fl_date;
    +
    +describe formatted tab_date fl_date;
    +
    +-- Update stats manually. Try both yyyy-mm-dd and integer value for high/low value
    +alter table tab_date update statistics for column fl_date set ('numDVs'='19', 'highValue'='2015-01-01', 'lowValue'='0');
    +
    +describe formatted tab_date fl_date;
  • Jdere at Apr 14, 2015 at 8:47 pm
    Added: hive/trunk/ql/src/test/results/clientpositive/compute_stats_date.q.out
    URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/compute_stats_date.q.out?rev=1673553&view=auto
    ==============================================================================
    --- hive/trunk/ql/src/test/results/clientpositive/compute_stats_date.q.out (added)
    +++ hive/trunk/ql/src/test/results/clientpositive/compute_stats_date.q.out Tue Apr 14 20:47:29 2015
    @@ -0,0 +1,127 @@
    +PREHOOK: query: create table tab_date (
    + origin_city_name string,
    + dest_city_name string,
    + fl_date date,
    + arr_delay float,
    + fl_num int
    +)
    +PREHOOK: type: CREATETABLE
    +PREHOOK: Output: database:default
    +PREHOOK: Output: default@tab_date
    +POSTHOOK: query: create table tab_date (
    + origin_city_name string,
    + dest_city_name string,
    + fl_date date,
    + arr_delay float,
    + fl_num int
    +)
    +POSTHOOK: type: CREATETABLE
    +POSTHOOK: Output: database:default
    +POSTHOOK: Output: default@tab_date
    +PREHOOK: query: -- insert some data
    +load data local inpath '../../data/files/flights_join.txt' overwrite into table tab_date
    +PREHOOK: type: LOAD
    +#### A masked pattern was here ####
    +PREHOOK: Output: default@tab_date
    +POSTHOOK: query: -- insert some data
    +load data local inpath '../../data/files/flights_join.txt' overwrite into table tab_date
    +POSTHOOK: type: LOAD
    +#### A masked pattern was here ####
    +POSTHOOK: Output: default@tab_date
    +PREHOOK: query: select count(*) from tab_date
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +POSTHOOK: query: select count(*) from tab_date
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +20
    +PREHOOK: query: -- compute statistical summary of data
    +select compute_stats(fl_date, 16) from tab_date
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +POSTHOOK: query: -- compute statistical summary of data
    +select compute_stats(fl_date, 16) from tab_date
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +{"columntype":"Date","min":"2000-11-20","max":"2010-10-29","countnulls":0,"numdistinctvalues":18}
    +PREHOOK: query: explain
    +analyze table tab_date compute statistics for columns fl_date
    +PREHOOK: type: QUERY
    +POSTHOOK: query: explain
    +analyze table tab_date compute statistics for columns fl_date
    +POSTHOOK: type: QUERY
    +STAGE DEPENDENCIES:
    + Stage-0 is a root stage
    + Stage-1 is a root stage
    +
    +STAGE PLANS:
    + Stage: Stage-0
    + Map Reduce
    + Map Operator Tree:
    + TableScan
    + alias: tab_date
    + Select Operator
    + expressions: fl_date (type: date)
    + outputColumnNames: fl_date
    + Group By Operator
    + aggregations: compute_stats(fl_date, 16)
    + mode: hash
    + outputColumnNames: _col0
    + Reduce Output Operator
    + sort order:
    + value expressions: _col0 (type: struct<columntype:string,min:date,max:date,countnulls:bigint,bitvector:string,numbitvectors:int>)
    + Reduce Operator Tree:
    + Group By Operator
    + aggregations: compute_stats(VALUE._col0)
    + mode: mergepartial
    + outputColumnNames: _col0
    + File Output Operator
    + compressed: false
    + table:
    + input format: org.apache.hadoop.mapred.TextInputFormat
    + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    +
    + Stage: Stage-1
    + Column Stats Work
    + Column Stats Desc:
    + Columns: fl_date
    + Column Types: date
    + Table: default.tab_date
    +
    +PREHOOK: query: analyze table tab_date compute statistics for columns fl_date
    +PREHOOK: type: QUERY
    +PREHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +POSTHOOK: query: analyze table tab_date compute statistics for columns fl_date
    +POSTHOOK: type: QUERY
    +POSTHOOK: Input: default@tab_date
    +#### A masked pattern was here ####
    +PREHOOK: query: describe formatted tab_date fl_date
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@tab_date
    +POSTHOOK: query: describe formatted tab_date fl_date
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@tab_date
    +# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
    +
    +fl_date date 2000-11-20 2010-10-29 0 18 from deserializer
    +PREHOOK: query: -- Update stats manually. Try both yyyy-mm-dd and integer value for high/low value
    +alter table tab_date update statistics for column fl_date set ('numDVs'='19', 'highValue'='2015-01-01', 'lowValue'='0')
    +PREHOOK: type: ALTERTABLE_UPDATETABLESTATS
    +POSTHOOK: query: -- Update stats manually. Try both yyyy-mm-dd and integer value for high/low value
    +alter table tab_date update statistics for column fl_date set ('numDVs'='19', 'highValue'='2015-01-01', 'lowValue'='0')
    +POSTHOOK: type: ALTERTABLE_UPDATETABLESTATS
    +PREHOOK: query: describe formatted tab_date fl_date
    +PREHOOK: type: DESCTABLE
    +PREHOOK: Input: default@tab_date
    +POSTHOOK: query: describe formatted tab_date fl_date
    +POSTHOOK: type: DESCTABLE
    +POSTHOOK: Input: default@tab_date
    +# col_name data_type min max num_nulls distinct_count avg_col_len max_col_len num_trues num_falses comment
    +
    +fl_date date 1970-01-01 2015-01-01 0 19 from deserializer

Related Discussions

Discussion Navigation
viewthread | post
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Apr 14, '15 at 8:47p
active: Apr 14, '15 at 8:47p
posts: 4
users: 1
website: hive.apache.org

1 user in discussion

Jdere: 4 posts

People

Translate

site design / logo © 2022 Grokbase