Author: kevinwilfong
Date: Tue Feb 12 18:52:55 2013
New Revision: 1445309

URL: http://svn.apache.org/r1445309
Log:
HIVE-3252. Add environment context to metastore Thrift calls. (Samuel Yuan via kevinwilfong)

Added:
     hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
Modified:
     hive/trunk/metastore/if/hive_metastore.thrift
     hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
     hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
     hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
     hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
     hive/trunk/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
     hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
     hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
     hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
     hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
     hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
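For readers skimming the diff: the EnvironmentContext carried by the new calls is the existing Thrift struct that wraps a map<string, string> of properties, letting a caller attach arbitrary key/value metadata to a metastore operation. A minimal sketch of building one from the generated Java bean follows; the property keys are purely illustrative and are not defined anywhere in this patch.

    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

    public class EnvironmentContextExample {
      // Builds an EnvironmentContext carrying arbitrary key/value metadata.
      // The keys below are illustrative assumptions, not part of this change.
      static EnvironmentContext buildContext() {
        EnvironmentContext ctx = new EnvironmentContext();
        // Thrift generates putToProperties() for the map<string,string> field.
        ctx.putToProperties("initiator", "example-client");
        ctx.putToProperties("reason", "scheduled-cleanup");
        return ctx;
      }

      public static void main(String[] args) {
        System.out.println(buildContext());
      }
    }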

Modified: hive/trunk/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/if/hive_metastore.thrift?rev=1445309&r1=1445308&r2=1445309&view=diff
==============================================================================
--- hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hive/trunk/metastore/if/hive_metastore.thrift Tue Feb 12 18:52:55 2013
@@ -363,6 +363,9 @@ service ThriftHiveMetastore extends fb30
    // delete data (including partitions) if deleteData is set to true
    void drop_table(1:string dbname, 2:string name, 3:bool deleteData)
                         throws(1:NoSuchObjectException o1, 2:MetaException o3)
+  void drop_table_with_environment_context(1:string dbname, 2:string name, 3:bool deleteData,
+                       4:EnvironmentContext environment_context)
+                       throws(1:NoSuchObjectException o1, 2:MetaException o3)
    list<string> get_tables(1: string db_name, 2: string pattern) throws (1: MetaException o1)
    list<string> get_all_tables(1: string db_name) throws (1: MetaException o1)

@@ -427,12 +430,24 @@ service ThriftHiveMetastore extends fb30
                         throws(1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
    Partition append_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals)
                         throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
+  Partition append_partition_with_environment_context(1:string db_name, 2:string tbl_name,
+                       3:list<string> part_vals, 4:EnvironmentContext environment_context)
+                       throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
    Partition append_partition_by_name(1:string db_name, 2:string tbl_name, 3:string part_name)
                         throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
+  Partition append_partition_by_name_with_environment_context(1:string db_name, 2:string tbl_name,
+                       3:string part_name, 4:EnvironmentContext environment_context)
+                       throws (1:InvalidObjectException o1, 2:AlreadyExistsException o2, 3:MetaException o3)
    bool drop_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals, 4:bool deleteData)
                         throws(1:NoSuchObjectException o1, 2:MetaException o2)
+  bool drop_partition_with_environment_context(1:string db_name, 2:string tbl_name,
+                       3:list<string> part_vals, 4:bool deleteData, 5:EnvironmentContext environment_context)
+                       throws(1:NoSuchObjectException o1, 2:MetaException o2)
    bool drop_partition_by_name(1:string db_name, 2:string tbl_name, 3:string part_name, 4:bool deleteData)
- throws(1:NoSuchObjectException o1, 2:MetaException o2)
+ throws(1:NoSuchObjectException o1, 2:MetaException o2)
+  bool drop_partition_by_name_with_environment_context(1:string db_name, 2:string tbl_name,
+                       3:string part_name, 4:bool deleteData, 5:EnvironmentContext environment_context)
+                       throws(1:NoSuchObjectException o1, 2:MetaException o2)
    Partition get_partition(1:string db_name, 2:string tbl_name, 3:list<string> part_vals)
                         throws(1:MetaException o1, 2:NoSuchObjectException o2)


  • Kevinwilfong at Feb 12, 2013 at 6:53 pm
    Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h Tue Feb 12 18:52:55 2013
    @@ -31,6 +31,7 @@ class ThriftHiveMetastoreIf : virtual pu
        virtual void create_table(const Table& tbl) = 0;
        virtual void create_table_with_environment_context(const Table& tbl, const EnvironmentContext& environment_context) = 0;
        virtual void drop_table(const std::string& dbname, const std::string& name, const bool deleteData) = 0;
    + virtual void drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context) = 0;
        virtual void get_tables(std::vector<std::string> & _return, const std::string& db_name, const std::string& pattern) = 0;
        virtual void get_all_tables(std::vector<std::string> & _return, const std::string& db_name) = 0;
        virtual void get_table(Table& _return, const std::string& dbname, const std::string& tbl_name) = 0;
    @@ -42,9 +43,13 @@ class ThriftHiveMetastoreIf : virtual pu
        virtual void add_partition_with_environment_context(Partition& _return, const Partition& new_part, const EnvironmentContext& environment_context) = 0;
        virtual int32_t add_partitions(const std::vector<Partition> & new_parts) = 0;
        virtual void append_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals) = 0;
    + virtual void append_partition_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const EnvironmentContext& environment_context) = 0;
        virtual void append_partition_by_name(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name) = 0;
    + virtual void append_partition_by_name_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const EnvironmentContext& environment_context) = 0;
        virtual bool drop_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData) = 0;
    + virtual bool drop_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData, const EnvironmentContext& environment_context) = 0;
        virtual bool drop_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData) = 0;
    + virtual bool drop_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData, const EnvironmentContext& environment_context) = 0;
        virtual void get_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals) = 0;
        virtual void get_partition_with_auth(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const std::string& user_name, const std::vector<std::string> & group_names) = 0;
        virtual void get_partition_by_name(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name) = 0;
    @@ -167,6 +172,9 @@ class ThriftHiveMetastoreNull : virtual
        void drop_table(const std::string& /* dbname */, const std::string& /* name */, const bool /* deleteData */) {
          return;
        }
    + void drop_table_with_environment_context(const std::string& /* dbname */, const std::string& /* name */, const bool /* deleteData */, const EnvironmentContext& /* environment_context */) {
    + return;
    + }
        void get_tables(std::vector<std::string> & /* _return */, const std::string& /* db_name */, const std::string& /* pattern */) {
          return;
        }
    @@ -201,17 +209,31 @@ class ThriftHiveMetastoreNull : virtual
        void append_partition(Partition& /* _return */, const std::string& /* db_name */, const std::string& /* tbl_name */, const std::vector<std::string> & /* part_vals */) {
          return;
        }
    + void append_partition_with_environment_context(Partition& /* _return */, const std::string& /* db_name */, const std::string& /* tbl_name */, const std::vector<std::string> & /* part_vals */, const EnvironmentContext& /* environment_context */) {
    + return;
    + }
        void append_partition_by_name(Partition& /* _return */, const std::string& /* db_name */, const std::string& /* tbl_name */, const std::string& /* part_name */) {
          return;
        }
    + void append_partition_by_name_with_environment_context(Partition& /* _return */, const std::string& /* db_name */, const std::string& /* tbl_name */, const std::string& /* part_name */, const EnvironmentContext& /* environment_context */) {
    + return;
    + }
        bool drop_partition(const std::string& /* db_name */, const std::string& /* tbl_name */, const std::vector<std::string> & /* part_vals */, const bool /* deleteData */) {
          bool _return = false;
          return _return;
        }
    + bool drop_partition_with_environment_context(const std::string& /* db_name */, const std::string& /* tbl_name */, const std::vector<std::string> & /* part_vals */, const bool /* deleteData */, const EnvironmentContext& /* environment_context */) {
    + bool _return = false;
    + return _return;
    + }
        bool drop_partition_by_name(const std::string& /* db_name */, const std::string& /* tbl_name */, const std::string& /* part_name */, const bool /* deleteData */) {
          bool _return = false;
          return _return;
        }
    + bool drop_partition_by_name_with_environment_context(const std::string& /* db_name */, const std::string& /* tbl_name */, const std::string& /* part_name */, const bool /* deleteData */, const EnvironmentContext& /* environment_context */) {
    + bool _return = false;
    + return _return;
    + }
        void get_partition(Partition& /* _return */, const std::string& /* db_name */, const std::string& /* tbl_name */, const std::vector<std::string> & /* part_vals */) {
          return;
        }
    @@ -2343,6 +2365,151 @@ class ThriftHiveMetastore_drop_table_pre

      };

    +typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_args__isset {
    + _ThriftHiveMetastore_drop_table_with_environment_context_args__isset() : dbname(false), name(false), deleteData(false), environment_context(false) {}
    + bool dbname;
    + bool name;
    + bool deleteData;
    + bool environment_context;
    +} _ThriftHiveMetastore_drop_table_with_environment_context_args__isset;
    +
    +class ThriftHiveMetastore_drop_table_with_environment_context_args {
    + public:
    +
    + ThriftHiveMetastore_drop_table_with_environment_context_args() : dbname(), name(), deleteData(0) {
    + }
    +
    + virtual ~ThriftHiveMetastore_drop_table_with_environment_context_args() throw() {}
    +
    + std::string dbname;
    + std::string name;
    + bool deleteData;
    + EnvironmentContext environment_context;
    +
    + _ThriftHiveMetastore_drop_table_with_environment_context_args__isset __isset;
    +
    + void __set_dbname(const std::string& val) {
    + dbname = val;
    + }
    +
    + void __set_name(const std::string& val) {
    + name = val;
    + }
    +
    + void __set_deleteData(const bool val) {
    + deleteData = val;
    + }
    +
    + void __set_environment_context(const EnvironmentContext& val) {
    + environment_context = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_drop_table_with_environment_context_args & rhs) const
    + {
    + if (!(dbname == rhs.dbname))
    + return false;
    + if (!(name == rhs.name))
    + return false;
    + if (!(deleteData == rhs.deleteData))
    + return false;
    + if (!(environment_context == rhs.environment_context))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_drop_table_with_environment_context_args &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_drop_table_with_environment_context_args & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +
    +class ThriftHiveMetastore_drop_table_with_environment_context_pargs {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_drop_table_with_environment_context_pargs() throw() {}
    +
    + const std::string* dbname;
    + const std::string* name;
    + const bool* deleteData;
    + const EnvironmentContext* environment_context;
    +
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_result__isset {
    + _ThriftHiveMetastore_drop_table_with_environment_context_result__isset() : o1(false), o3(false) {}
    + bool o1;
    + bool o3;
    +} _ThriftHiveMetastore_drop_table_with_environment_context_result__isset;
    +
    +class ThriftHiveMetastore_drop_table_with_environment_context_result {
    + public:
    +
    + ThriftHiveMetastore_drop_table_with_environment_context_result() {
    + }
    +
    + virtual ~ThriftHiveMetastore_drop_table_with_environment_context_result() throw() {}
    +
    + NoSuchObjectException o1;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_drop_table_with_environment_context_result__isset __isset;
    +
    + void __set_o1(const NoSuchObjectException& val) {
    + o1 = val;
    + }
    +
    + void __set_o3(const MetaException& val) {
    + o3 = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_drop_table_with_environment_context_result & rhs) const
    + {
    + if (!(o1 == rhs.o1))
    + return false;
    + if (!(o3 == rhs.o3))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_drop_table_with_environment_context_result &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_drop_table_with_environment_context_result & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_drop_table_with_environment_context_presult__isset {
    + _ThriftHiveMetastore_drop_table_with_environment_context_presult__isset() : o1(false), o3(false) {}
    + bool o1;
    + bool o3;
    +} _ThriftHiveMetastore_drop_table_with_environment_context_presult__isset;
    +
    +class ThriftHiveMetastore_drop_table_with_environment_context_presult {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_drop_table_with_environment_context_presult() throw() {}
    +
    + NoSuchObjectException o1;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_drop_table_with_environment_context_presult__isset __isset;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    +
    +};
    +
      typedef struct _ThriftHiveMetastore_get_tables_args__isset {
        _ThriftHiveMetastore_get_tables_args__isset() : db_name(false), pattern(false) {}
        bool db_name;
    @@ -3836,11 +4003,656 @@ class ThriftHiveMetastore_append_partiti
          o2 = val;
        }

    - void __set_o3(const MetaException& val) {
    - o3 = val;
    + void __set_o3(const MetaException& val) {
    + o3 = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_result & rhs) const
    + {
    + if (!(success == rhs.success))
    + return false;
    + if (!(o1 == rhs.o1))
    + return false;
    + if (!(o2 == rhs.o2))
    + return false;
    + if (!(o3 == rhs.o3))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_result &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_result & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_presult__isset {
    + _ThriftHiveMetastore_append_partition_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_presult__isset;
    +
    +class ThriftHiveMetastore_append_partition_presult {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_presult() throw() {}
    +
    + Partition* success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_presult__isset __isset;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_with_environment_context_args__isset {
    + _ThriftHiveMetastore_append_partition_with_environment_context_args__isset() : db_name(false), tbl_name(false), part_vals(false), environment_context(false) {}
    + bool db_name;
    + bool tbl_name;
    + bool part_vals;
    + bool environment_context;
    +} _ThriftHiveMetastore_append_partition_with_environment_context_args__isset;
    +
    +class ThriftHiveMetastore_append_partition_with_environment_context_args {
    + public:
    +
    + ThriftHiveMetastore_append_partition_with_environment_context_args() : db_name(), tbl_name() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_with_environment_context_args() throw() {}
    +
    + std::string db_name;
    + std::string tbl_name;
    + std::vector<std::string> part_vals;
    + EnvironmentContext environment_context;
    +
    + _ThriftHiveMetastore_append_partition_with_environment_context_args__isset __isset;
    +
    + void __set_db_name(const std::string& val) {
    + db_name = val;
    + }
    +
    + void __set_tbl_name(const std::string& val) {
    + tbl_name = val;
    + }
    +
    + void __set_part_vals(const std::vector<std::string> & val) {
    + part_vals = val;
    + }
    +
    + void __set_environment_context(const EnvironmentContext& val) {
    + environment_context = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_with_environment_context_args & rhs) const
    + {
    + if (!(db_name == rhs.db_name))
    + return false;
    + if (!(tbl_name == rhs.tbl_name))
    + return false;
    + if (!(part_vals == rhs.part_vals))
    + return false;
    + if (!(environment_context == rhs.environment_context))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_with_environment_context_args &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_with_environment_context_args & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +
    +class ThriftHiveMetastore_append_partition_with_environment_context_pargs {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_with_environment_context_pargs() throw() {}
    +
    + const std::string* db_name;
    + const std::string* tbl_name;
    + const std::vector<std::string> * part_vals;
    + const EnvironmentContext* environment_context;
    +
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_with_environment_context_result__isset {
    + _ThriftHiveMetastore_append_partition_with_environment_context_result__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_with_environment_context_result__isset;
    +
    +class ThriftHiveMetastore_append_partition_with_environment_context_result {
    + public:
    +
    + ThriftHiveMetastore_append_partition_with_environment_context_result() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_with_environment_context_result() throw() {}
    +
    + Partition success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_with_environment_context_result__isset __isset;
    +
    + void __set_success(const Partition& val) {
    + success = val;
    + }
    +
    + void __set_o1(const InvalidObjectException& val) {
    + o1 = val;
    + }
    +
    + void __set_o2(const AlreadyExistsException& val) {
    + o2 = val;
    + }
    +
    + void __set_o3(const MetaException& val) {
    + o3 = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_with_environment_context_result & rhs) const
    + {
    + if (!(success == rhs.success))
    + return false;
    + if (!(o1 == rhs.o1))
    + return false;
    + if (!(o2 == rhs.o2))
    + return false;
    + if (!(o3 == rhs.o3))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_with_environment_context_result &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_with_environment_context_result & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_with_environment_context_presult__isset {
    + _ThriftHiveMetastore_append_partition_with_environment_context_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_with_environment_context_presult__isset;
    +
    +class ThriftHiveMetastore_append_partition_with_environment_context_presult {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_with_environment_context_presult() throw() {}
    +
    + Partition* success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_with_environment_context_presult__isset __isset;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_args__isset {
    + _ThriftHiveMetastore_append_partition_by_name_args__isset() : db_name(false), tbl_name(false), part_name(false) {}
    + bool db_name;
    + bool tbl_name;
    + bool part_name;
    +} _ThriftHiveMetastore_append_partition_by_name_args__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_args {
    + public:
    +
    + ThriftHiveMetastore_append_partition_by_name_args() : db_name(), tbl_name(), part_name() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_args() throw() {}
    +
    + std::string db_name;
    + std::string tbl_name;
    + std::string part_name;
    +
    + _ThriftHiveMetastore_append_partition_by_name_args__isset __isset;
    +
    + void __set_db_name(const std::string& val) {
    + db_name = val;
    + }
    +
    + void __set_tbl_name(const std::string& val) {
    + tbl_name = val;
    + }
    +
    + void __set_part_name(const std::string& val) {
    + part_name = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_by_name_args & rhs) const
    + {
    + if (!(db_name == rhs.db_name))
    + return false;
    + if (!(tbl_name == rhs.tbl_name))
    + return false;
    + if (!(part_name == rhs.part_name))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_by_name_args &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_by_name_args & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +
    +class ThriftHiveMetastore_append_partition_by_name_pargs {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_pargs() throw() {}
    +
    + const std::string* db_name;
    + const std::string* tbl_name;
    + const std::string* part_name;
    +
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_result__isset {
    + _ThriftHiveMetastore_append_partition_by_name_result__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_by_name_result__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_result {
    + public:
    +
    + ThriftHiveMetastore_append_partition_by_name_result() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_result() throw() {}
    +
    + Partition success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_by_name_result__isset __isset;
    +
    + void __set_success(const Partition& val) {
    + success = val;
    + }
    +
    + void __set_o1(const InvalidObjectException& val) {
    + o1 = val;
    + }
    +
    + void __set_o2(const AlreadyExistsException& val) {
    + o2 = val;
    + }
    +
    + void __set_o3(const MetaException& val) {
    + o3 = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_by_name_result & rhs) const
    + {
    + if (!(success == rhs.success))
    + return false;
    + if (!(o1 == rhs.o1))
    + return false;
    + if (!(o2 == rhs.o2))
    + return false;
    + if (!(o3 == rhs.o3))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_by_name_result &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_by_name_result & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_presult__isset {
    + _ThriftHiveMetastore_append_partition_by_name_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_by_name_presult__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_presult {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_presult() throw() {}
    +
    + Partition* success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_by_name_presult__isset __isset;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_with_environment_context_args__isset {
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_args__isset() : db_name(false), tbl_name(false), part_name(false), environment_context(false) {}
    + bool db_name;
    + bool tbl_name;
    + bool part_name;
    + bool environment_context;
    +} _ThriftHiveMetastore_append_partition_by_name_with_environment_context_args__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_args {
    + public:
    +
    + ThriftHiveMetastore_append_partition_by_name_with_environment_context_args() : db_name(), tbl_name(), part_name() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_with_environment_context_args() throw() {}
    +
    + std::string db_name;
    + std::string tbl_name;
    + std::string part_name;
    + EnvironmentContext environment_context;
    +
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_args__isset __isset;
    +
    + void __set_db_name(const std::string& val) {
    + db_name = val;
    + }
    +
    + void __set_tbl_name(const std::string& val) {
    + tbl_name = val;
    + }
    +
    + void __set_part_name(const std::string& val) {
    + part_name = val;
    + }
    +
    + void __set_environment_context(const EnvironmentContext& val) {
    + environment_context = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_args & rhs) const
    + {
    + if (!(db_name == rhs.db_name))
    + return false;
    + if (!(tbl_name == rhs.tbl_name))
    + return false;
    + if (!(part_name == rhs.part_name))
    + return false;
    + if (!(environment_context == rhs.environment_context))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_args &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_args & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_pargs {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_with_environment_context_pargs() throw() {}
    +
    + const std::string* db_name;
    + const std::string* tbl_name;
    + const std::string* part_name;
    + const EnvironmentContext* environment_context;
    +
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_with_environment_context_result__isset {
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_result__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_by_name_with_environment_context_result__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_result {
    + public:
    +
    + ThriftHiveMetastore_append_partition_by_name_with_environment_context_result() {
    + }
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_with_environment_context_result() throw() {}
    +
    + Partition success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_result__isset __isset;
    +
    + void __set_success(const Partition& val) {
    + success = val;
    + }
    +
    + void __set_o1(const InvalidObjectException& val) {
    + o1 = val;
    + }
    +
    + void __set_o2(const AlreadyExistsException& val) {
    + o2 = val;
    + }
    +
    + void __set_o3(const MetaException& val) {
    + o3 = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_result & rhs) const
    + {
    + if (!(success == rhs.success))
    + return false;
    + if (!(o1 == rhs.o1))
    + return false;
    + if (!(o2 == rhs.o2))
    + return false;
    + if (!(o3 == rhs.o3))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_result &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_append_partition_by_name_with_environment_context_result & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult__isset {
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    + bool o3;
    +} _ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult__isset;
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult() throw() {}
    +
    + Partition* success;
    + InvalidObjectException o1;
    + AlreadyExistsException o2;
    + MetaException o3;
    +
    + _ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult__isset __isset;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_drop_partition_args__isset {
    + _ThriftHiveMetastore_drop_partition_args__isset() : db_name(false), tbl_name(false), part_vals(false), deleteData(false) {}
    + bool db_name;
    + bool tbl_name;
    + bool part_vals;
    + bool deleteData;
    +} _ThriftHiveMetastore_drop_partition_args__isset;
    +
    +class ThriftHiveMetastore_drop_partition_args {
    + public:
    +
    + ThriftHiveMetastore_drop_partition_args() : db_name(), tbl_name(), deleteData(0) {
    + }
    +
    + virtual ~ThriftHiveMetastore_drop_partition_args() throw() {}
    +
    + std::string db_name;
    + std::string tbl_name;
    + std::vector<std::string> part_vals;
    + bool deleteData;
    +
    + _ThriftHiveMetastore_drop_partition_args__isset __isset;
    +
    + void __set_db_name(const std::string& val) {
    + db_name = val;
    + }
    +
    + void __set_tbl_name(const std::string& val) {
    + tbl_name = val;
    + }
    +
    + void __set_part_vals(const std::vector<std::string> & val) {
    + part_vals = val;
    + }
    +
    + void __set_deleteData(const bool val) {
    + deleteData = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_drop_partition_args & rhs) const
    + {
    + if (!(db_name == rhs.db_name))
    + return false;
    + if (!(tbl_name == rhs.tbl_name))
    + return false;
    + if (!(part_vals == rhs.part_vals))
    + return false;
    + if (!(deleteData == rhs.deleteData))
    + return false;
    + return true;
    + }
    + bool operator != (const ThriftHiveMetastore_drop_partition_args &rhs) const {
    + return !(*this == rhs);
    + }
    +
    + bool operator < (const ThriftHiveMetastore_drop_partition_args & ) const;
    +
    + uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +
    +class ThriftHiveMetastore_drop_partition_pargs {
    + public:
    +
    +
    + virtual ~ThriftHiveMetastore_drop_partition_pargs() throw() {}
    +
    + const std::string* db_name;
    + const std::string* tbl_name;
    + const std::vector<std::string> * part_vals;
    + const bool* deleteData;
    +
    + uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    +
    +};
    +
    +typedef struct _ThriftHiveMetastore_drop_partition_result__isset {
    + _ThriftHiveMetastore_drop_partition_result__isset() : success(false), o1(false), o2(false) {}
    + bool success;
    + bool o1;
    + bool o2;
    +} _ThriftHiveMetastore_drop_partition_result__isset;
    +
    +class ThriftHiveMetastore_drop_partition_result {
    + public:
    +
    + ThriftHiveMetastore_drop_partition_result() : success(0) {
    + }
    +
    + virtual ~ThriftHiveMetastore_drop_partition_result() throw() {}
    +
    + bool success;
    + NoSuchObjectException o1;
    + MetaException o2;
    +
    + _ThriftHiveMetastore_drop_partition_result__isset __isset;
    +
    + void __set_success(const bool val) {
    + success = val;
    + }
    +
    + void __set_o1(const NoSuchObjectException& val) {
    + o1 = val;
    + }
    +
    + void __set_o2(const MetaException& val) {
    + o2 = val;
        }

    - bool operator == (const ThriftHiveMetastore_append_partition_result & rhs) const
    + bool operator == (const ThriftHiveMetastore_drop_partition_result & rhs) const
        {
          if (!(success == rhs.success))
            return false;
    @@ -3848,66 +4660,66 @@ class ThriftHiveMetastore_append_partiti
            return false;
          if (!(o2 == rhs.o2))
            return false;
    - if (!(o3 == rhs.o3))
    - return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_append_partition_result &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_result &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_append_partition_result & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_result & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_append_partition_presult__isset {
    - _ThriftHiveMetastore_append_partition_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_presult__isset {
    + _ThriftHiveMetastore_drop_partition_presult__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    - bool o3;
    -} _ThriftHiveMetastore_append_partition_presult__isset;
    +} _ThriftHiveMetastore_drop_partition_presult__isset;

    -class ThriftHiveMetastore_append_partition_presult {
    +class ThriftHiveMetastore_drop_partition_presult {
       public:


    - virtual ~ThriftHiveMetastore_append_partition_presult() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_presult() throw() {}

    - Partition* success;
    - InvalidObjectException o1;
    - AlreadyExistsException o2;
    - MetaException o3;
    + bool* success;
    + NoSuchObjectException o1;
    + MetaException o2;

    - _ThriftHiveMetastore_append_partition_presult__isset __isset;
    + _ThriftHiveMetastore_drop_partition_presult__isset __isset;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);

      };

    -typedef struct _ThriftHiveMetastore_append_partition_by_name_args__isset {
    - _ThriftHiveMetastore_append_partition_by_name_args__isset() : db_name(false), tbl_name(false), part_name(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_with_environment_context_args__isset {
    + _ThriftHiveMetastore_drop_partition_with_environment_context_args__isset() : db_name(false), tbl_name(false), part_vals(false), deleteData(false), environment_context(false) {}
        bool db_name;
        bool tbl_name;
    - bool part_name;
    -} _ThriftHiveMetastore_append_partition_by_name_args__isset;
    + bool part_vals;
    + bool deleteData;
    + bool environment_context;
    +} _ThriftHiveMetastore_drop_partition_with_environment_context_args__isset;

    -class ThriftHiveMetastore_append_partition_by_name_args {
    +class ThriftHiveMetastore_drop_partition_with_environment_context_args {
       public:

    - ThriftHiveMetastore_append_partition_by_name_args() : db_name(), tbl_name(), part_name() {
    + ThriftHiveMetastore_drop_partition_with_environment_context_args() : db_name(), tbl_name(), deleteData(0) {
        }

    - virtual ~ThriftHiveMetastore_append_partition_by_name_args() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_with_environment_context_args() throw() {}

        std::string db_name;
        std::string tbl_name;
    - std::string part_name;
    + std::vector<std::string> part_vals;
    + bool deleteData;
    + EnvironmentContext environment_context;

    - _ThriftHiveMetastore_append_partition_by_name_args__isset __isset;
    + _ThriftHiveMetastore_drop_partition_with_environment_context_args__isset __isset;

        void __set_db_name(const std::string& val) {
          db_name = val;
    @@ -3917,25 +4729,37 @@ class ThriftHiveMetastore_append_partiti
          tbl_name = val;
        }

    - void __set_part_name(const std::string& val) {
    - part_name = val;
    + void __set_part_vals(const std::vector<std::string> & val) {
    + part_vals = val;
        }

    - bool operator == (const ThriftHiveMetastore_append_partition_by_name_args & rhs) const
    + void __set_deleteData(const bool val) {
    + deleteData = val;
    + }
    +
    + void __set_environment_context(const EnvironmentContext& val) {
    + environment_context = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_drop_partition_with_environment_context_args & rhs) const
        {
          if (!(db_name == rhs.db_name))
            return false;
          if (!(tbl_name == rhs.tbl_name))
            return false;
    - if (!(part_name == rhs.part_name))
    + if (!(part_vals == rhs.part_vals))
    + return false;
    + if (!(deleteData == rhs.deleteData))
    + return false;
    + if (!(environment_context == rhs.environment_context))
            return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_append_partition_by_name_args &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_with_environment_context_args &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_append_partition_by_name_args & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_with_environment_context_args & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    @@ -3943,60 +4767,56 @@ class ThriftHiveMetastore_append_partiti
      };


    -class ThriftHiveMetastore_append_partition_by_name_pargs {
    +class ThriftHiveMetastore_drop_partition_with_environment_context_pargs {
       public:


    - virtual ~ThriftHiveMetastore_append_partition_by_name_pargs() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_with_environment_context_pargs() throw() {}

        const std::string* db_name;
        const std::string* tbl_name;
    - const std::string* part_name;
    + const std::vector<std::string> * part_vals;
    + const bool* deleteData;
    + const EnvironmentContext* environment_context;

        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_append_partition_by_name_result__isset {
    - _ThriftHiveMetastore_append_partition_by_name_result__isset() : success(false), o1(false), o2(false), o3(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_with_environment_context_result__isset {
    + _ThriftHiveMetastore_drop_partition_with_environment_context_result__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    - bool o3;
    -} _ThriftHiveMetastore_append_partition_by_name_result__isset;
    +} _ThriftHiveMetastore_drop_partition_with_environment_context_result__isset;

    -class ThriftHiveMetastore_append_partition_by_name_result {
    +class ThriftHiveMetastore_drop_partition_with_environment_context_result {
       public:

    - ThriftHiveMetastore_append_partition_by_name_result() {
    + ThriftHiveMetastore_drop_partition_with_environment_context_result() : success(0) {
        }

    - virtual ~ThriftHiveMetastore_append_partition_by_name_result() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_with_environment_context_result() throw() {}

    - Partition success;
    - InvalidObjectException o1;
    - AlreadyExistsException o2;
    - MetaException o3;
    + bool success;
    + NoSuchObjectException o1;
    + MetaException o2;

    - _ThriftHiveMetastore_append_partition_by_name_result__isset __isset;
    + _ThriftHiveMetastore_drop_partition_with_environment_context_result__isset __isset;

    - void __set_success(const Partition& val) {
    + void __set_success(const bool val) {
          success = val;
        }

    - void __set_o1(const InvalidObjectException& val) {
    + void __set_o1(const NoSuchObjectException& val) {
          o1 = val;
        }

    - void __set_o2(const AlreadyExistsException& val) {
    + void __set_o2(const MetaException& val) {
          o2 = val;
        }

    - void __set_o3(const MetaException& val) {
    - o3 = val;
    - }
    -
    - bool operator == (const ThriftHiveMetastore_append_partition_by_name_result & rhs) const
    + bool operator == (const ThriftHiveMetastore_drop_partition_with_environment_context_result & rhs) const
        {
          if (!(success == rhs.success))
            return false;
    @@ -4004,68 +4824,64 @@ class ThriftHiveMetastore_append_partiti
            return false;
          if (!(o2 == rhs.o2))
            return false;
    - if (!(o3 == rhs.o3))
    - return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_append_partition_by_name_result &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_with_environment_context_result &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_append_partition_by_name_result & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_with_environment_context_result & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_append_partition_by_name_presult__isset {
    - _ThriftHiveMetastore_append_partition_by_name_presult__isset() : success(false), o1(false), o2(false), o3(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_with_environment_context_presult__isset {
    + _ThriftHiveMetastore_drop_partition_with_environment_context_presult__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    - bool o3;
    -} _ThriftHiveMetastore_append_partition_by_name_presult__isset;
    +} _ThriftHiveMetastore_drop_partition_with_environment_context_presult__isset;

    -class ThriftHiveMetastore_append_partition_by_name_presult {
    +class ThriftHiveMetastore_drop_partition_with_environment_context_presult {
       public:


    - virtual ~ThriftHiveMetastore_append_partition_by_name_presult() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_with_environment_context_presult() throw() {}

    - Partition* success;
    - InvalidObjectException o1;
    - AlreadyExistsException o2;
    - MetaException o3;
    + bool* success;
    + NoSuchObjectException o1;
    + MetaException o2;

    - _ThriftHiveMetastore_append_partition_by_name_presult__isset __isset;
    + _ThriftHiveMetastore_drop_partition_with_environment_context_presult__isset __isset;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_args__isset {
    - _ThriftHiveMetastore_drop_partition_args__isset() : db_name(false), tbl_name(false), part_vals(false), deleteData(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_args__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_args__isset() : db_name(false), tbl_name(false), part_name(false), deleteData(false) {}
        bool db_name;
        bool tbl_name;
    - bool part_vals;
    + bool part_name;
        bool deleteData;
    -} _ThriftHiveMetastore_drop_partition_args__isset;
    +} _ThriftHiveMetastore_drop_partition_by_name_args__isset;

    -class ThriftHiveMetastore_drop_partition_args {
    +class ThriftHiveMetastore_drop_partition_by_name_args {
       public:

    - ThriftHiveMetastore_drop_partition_args() : db_name(), tbl_name(), deleteData(0) {
    + ThriftHiveMetastore_drop_partition_by_name_args() : db_name(), tbl_name(), part_name(), deleteData(0) {
        }

    - virtual ~ThriftHiveMetastore_drop_partition_args() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_args() throw() {}

        std::string db_name;
        std::string tbl_name;
    - std::vector<std::string> part_vals;
    + std::string part_name;
        bool deleteData;

    - _ThriftHiveMetastore_drop_partition_args__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_args__isset __isset;

        void __set_db_name(const std::string& val) {
          db_name = val;
    @@ -4075,31 +4891,31 @@ class ThriftHiveMetastore_drop_partition
          tbl_name = val;
        }

    - void __set_part_vals(const std::vector<std::string> & val) {
    - part_vals = val;
    + void __set_part_name(const std::string& val) {
    + part_name = val;
        }

        void __set_deleteData(const bool val) {
          deleteData = val;
        }

    - bool operator == (const ThriftHiveMetastore_drop_partition_args & rhs) const
    + bool operator == (const ThriftHiveMetastore_drop_partition_by_name_args & rhs) const
        {
          if (!(db_name == rhs.db_name))
            return false;
          if (!(tbl_name == rhs.tbl_name))
            return false;
    - if (!(part_vals == rhs.part_vals))
    + if (!(part_name == rhs.part_name))
            return false;
          if (!(deleteData == rhs.deleteData))
            return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_drop_partition_args &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_by_name_args &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_drop_partition_args & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_by_name_args & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    @@ -4107,41 +4923,41 @@ class ThriftHiveMetastore_drop_partition
      };


    -class ThriftHiveMetastore_drop_partition_pargs {
    +class ThriftHiveMetastore_drop_partition_by_name_pargs {
       public:


    - virtual ~ThriftHiveMetastore_drop_partition_pargs() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_pargs() throw() {}

        const std::string* db_name;
        const std::string* tbl_name;
    - const std::vector<std::string> * part_vals;
    + const std::string* part_name;
        const bool* deleteData;

        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_result__isset {
    - _ThriftHiveMetastore_drop_partition_result__isset() : success(false), o1(false), o2(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_result__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_result__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    -} _ThriftHiveMetastore_drop_partition_result__isset;
    +} _ThriftHiveMetastore_drop_partition_by_name_result__isset;

    -class ThriftHiveMetastore_drop_partition_result {
    +class ThriftHiveMetastore_drop_partition_by_name_result {
       public:

    - ThriftHiveMetastore_drop_partition_result() : success(0) {
    + ThriftHiveMetastore_drop_partition_by_name_result() : success(0) {
        }

    - virtual ~ThriftHiveMetastore_drop_partition_result() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_result() throw() {}

        bool success;
        NoSuchObjectException o1;
        MetaException o2;

    - _ThriftHiveMetastore_drop_partition_result__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_result__isset __isset;

        void __set_success(const bool val) {
          success = val;
    @@ -4155,7 +4971,7 @@ class ThriftHiveMetastore_drop_partition
          o2 = val;
        }

    - bool operator == (const ThriftHiveMetastore_drop_partition_result & rhs) const
    + bool operator == (const ThriftHiveMetastore_drop_partition_by_name_result & rhs) const
        {
          if (!(success == rhs.success))
            return false;
    @@ -4165,62 +4981,64 @@ class ThriftHiveMetastore_drop_partition
            return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_drop_partition_result &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_by_name_result &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_drop_partition_result & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_by_name_result & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_presult__isset {
    - _ThriftHiveMetastore_drop_partition_presult__isset() : success(false), o1(false), o2(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_presult__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_presult__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    -} _ThriftHiveMetastore_drop_partition_presult__isset;
    +} _ThriftHiveMetastore_drop_partition_by_name_presult__isset;

    -class ThriftHiveMetastore_drop_partition_presult {
    +class ThriftHiveMetastore_drop_partition_by_name_presult {
       public:


    - virtual ~ThriftHiveMetastore_drop_partition_presult() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_presult() throw() {}

        bool* success;
        NoSuchObjectException o1;
        MetaException o2;

    - _ThriftHiveMetastore_drop_partition_presult__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_presult__isset __isset;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_by_name_args__isset {
    - _ThriftHiveMetastore_drop_partition_by_name_args__isset() : db_name(false), tbl_name(false), part_name(false), deleteData(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args__isset() : db_name(false), tbl_name(false), part_name(false), deleteData(false), environment_context(false) {}
        bool db_name;
        bool tbl_name;
        bool part_name;
        bool deleteData;
    -} _ThriftHiveMetastore_drop_partition_by_name_args__isset;
    + bool environment_context;
    +} _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args__isset;

    -class ThriftHiveMetastore_drop_partition_by_name_args {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args {
       public:

    - ThriftHiveMetastore_drop_partition_by_name_args() : db_name(), tbl_name(), part_name(), deleteData(0) {
    + ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args() : db_name(), tbl_name(), part_name(), deleteData(0) {
        }

    - virtual ~ThriftHiveMetastore_drop_partition_by_name_args() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args() throw() {}

        std::string db_name;
        std::string tbl_name;
        std::string part_name;
        bool deleteData;
    + EnvironmentContext environment_context;

    - _ThriftHiveMetastore_drop_partition_by_name_args__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args__isset __isset;

        void __set_db_name(const std::string& val) {
          db_name = val;
    @@ -4238,7 +5056,11 @@ class ThriftHiveMetastore_drop_partition
          deleteData = val;
        }

    - bool operator == (const ThriftHiveMetastore_drop_partition_by_name_args & rhs) const
    + void __set_environment_context(const EnvironmentContext& val) {
    + environment_context = val;
    + }
    +
    + bool operator == (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args & rhs) const
        {
          if (!(db_name == rhs.db_name))
            return false;
    @@ -4248,13 +5070,15 @@ class ThriftHiveMetastore_drop_partition
            return false;
          if (!(deleteData == rhs.deleteData))
            return false;
    + if (!(environment_context == rhs.environment_context))
    + return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_drop_partition_by_name_args &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_drop_partition_by_name_args & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
    @@ -4262,41 +5086,42 @@ class ThriftHiveMetastore_drop_partition
      };


    -class ThriftHiveMetastore_drop_partition_by_name_pargs {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_pargs {
       public:


    - virtual ~ThriftHiveMetastore_drop_partition_by_name_pargs() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_with_environment_context_pargs() throw() {}

        const std::string* db_name;
        const std::string* tbl_name;
        const std::string* part_name;
        const bool* deleteData;
    + const EnvironmentContext* environment_context;

        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_by_name_result__isset {
    - _ThriftHiveMetastore_drop_partition_by_name_result__isset() : success(false), o1(false), o2(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    -} _ThriftHiveMetastore_drop_partition_by_name_result__isset;
    +} _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result__isset;

    -class ThriftHiveMetastore_drop_partition_by_name_result {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result {
       public:

    - ThriftHiveMetastore_drop_partition_by_name_result() : success(0) {
    + ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result() : success(0) {
        }

    - virtual ~ThriftHiveMetastore_drop_partition_by_name_result() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result() throw() {}

        bool success;
        NoSuchObjectException o1;
        MetaException o2;

    - _ThriftHiveMetastore_drop_partition_by_name_result__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result__isset __isset;

        void __set_success(const bool val) {
          success = val;
    @@ -4310,7 +5135,7 @@ class ThriftHiveMetastore_drop_partition
          o2 = val;
        }

    - bool operator == (const ThriftHiveMetastore_drop_partition_by_name_result & rhs) const
    + bool operator == (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result & rhs) const
        {
          if (!(success == rhs.success))
            return false;
    @@ -4320,35 +5145,35 @@ class ThriftHiveMetastore_drop_partition
            return false;
          return true;
        }
    - bool operator != (const ThriftHiveMetastore_drop_partition_by_name_result &rhs) const {
    + bool operator != (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result &rhs) const {
          return !(*this == rhs);
        }

    - bool operator < (const ThriftHiveMetastore_drop_partition_by_name_result & ) const;
    + bool operator < (const ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result & ) const;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
        uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;

      };

    -typedef struct _ThriftHiveMetastore_drop_partition_by_name_presult__isset {
    - _ThriftHiveMetastore_drop_partition_by_name_presult__isset() : success(false), o1(false), o2(false) {}
    +typedef struct _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult__isset {
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult__isset() : success(false), o1(false), o2(false) {}
        bool success;
        bool o1;
        bool o2;
    -} _ThriftHiveMetastore_drop_partition_by_name_presult__isset;
    +} _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult__isset;

    -class ThriftHiveMetastore_drop_partition_by_name_presult {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult {
       public:


    - virtual ~ThriftHiveMetastore_drop_partition_by_name_presult() throw() {}
    + virtual ~ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult() throw() {}

        bool* success;
        NoSuchObjectException o1;
        MetaException o2;

    - _ThriftHiveMetastore_drop_partition_by_name_presult__isset __isset;
    + _ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult__isset __isset;

        uint32_t read(::apache::thrift::protocol::TProtocol* iprot);

    @@ -11009,6 +11834,9 @@ class ThriftHiveMetastoreClient : virtua
        void drop_table(const std::string& dbname, const std::string& name, const bool deleteData);
        void send_drop_table(const std::string& dbname, const std::string& name, const bool deleteData);
        void recv_drop_table();
    + void drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context);
    + void send_drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context);
    + void recv_drop_table_with_environment_context();
        void get_tables(std::vector<std::string> & _return, const std::string& db_name, const std::string& pattern);
        void send_get_tables(const std::string& db_name, const std::string& pattern);
        void recv_get_tables(std::vector<std::string> & _return);
    @@ -11042,15 +11870,27 @@ class ThriftHiveMetastoreClient : virtua
        void append_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals);
        void send_append_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals);
        void recv_append_partition(Partition& _return);
    + void append_partition_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const EnvironmentContext& environment_context);
    + void send_append_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const EnvironmentContext& environment_context);
    + void recv_append_partition_with_environment_context(Partition& _return);
        void append_partition_by_name(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name);
        void send_append_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name);
        void recv_append_partition_by_name(Partition& _return);
    + void append_partition_by_name_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const EnvironmentContext& environment_context);
    + void send_append_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const EnvironmentContext& environment_context);
    + void recv_append_partition_by_name_with_environment_context(Partition& _return);
        bool drop_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData);
        void send_drop_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData);
        bool recv_drop_partition();
    + bool drop_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData, const EnvironmentContext& environment_context);
    + void send_drop_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData, const EnvironmentContext& environment_context);
    + bool recv_drop_partition_with_environment_context();
        bool drop_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData);
        void send_drop_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData);
        bool recv_drop_partition_by_name();
    + bool drop_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData, const EnvironmentContext& environment_context);
    + void send_drop_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData, const EnvironmentContext& environment_context);
    + bool recv_drop_partition_by_name_with_environment_context();
        void get_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals);
        void send_get_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals);
        void recv_get_partition(Partition& _return);
    @@ -11214,6 +12054,7 @@ class ThriftHiveMetastoreProcessor : pub
        void process_create_table(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_create_table_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_drop_table(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    + void process_drop_table_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_tables(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_all_tables(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_table(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    @@ -11225,9 +12066,13 @@ class ThriftHiveMetastoreProcessor : pub
        void process_add_partition_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_add_partitions(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_append_partition(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    + void process_append_partition_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_append_partition_by_name(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    + void process_append_partition_by_name_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_drop_partition(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    + void process_drop_partition_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_drop_partition_by_name(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    + void process_drop_partition_by_name_with_environment_context(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_partition(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_partition_with_auth(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
        void process_get_partition_by_name(int32_t seqid, ::apache::thrift::protocol::TProtocol* iprot, ::apache::thrift::protocol::TProtocol* oprot, void* callContext);
    @@ -11293,6 +12138,7 @@ class ThriftHiveMetastoreProcessor : pub
          processMap_["create_table"] = &ThriftHiveMetastoreProcessor::process_create_table;
          processMap_["create_table_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_create_table_with_environment_context;
          processMap_["drop_table"] = &ThriftHiveMetastoreProcessor::process_drop_table;
    + processMap_["drop_table_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_drop_table_with_environment_context;
          processMap_["get_tables"] = &ThriftHiveMetastoreProcessor::process_get_tables;
          processMap_["get_all_tables"] = &ThriftHiveMetastoreProcessor::process_get_all_tables;
          processMap_["get_table"] = &ThriftHiveMetastoreProcessor::process_get_table;
    @@ -11304,9 +12150,13 @@ class ThriftHiveMetastoreProcessor : pub
          processMap_["add_partition_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_add_partition_with_environment_context;
          processMap_["add_partitions"] = &ThriftHiveMetastoreProcessor::process_add_partitions;
          processMap_["append_partition"] = &ThriftHiveMetastoreProcessor::process_append_partition;
    + processMap_["append_partition_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_append_partition_with_environment_context;
          processMap_["append_partition_by_name"] = &ThriftHiveMetastoreProcessor::process_append_partition_by_name;
    + processMap_["append_partition_by_name_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_append_partition_by_name_with_environment_context;
          processMap_["drop_partition"] = &ThriftHiveMetastoreProcessor::process_drop_partition;
    + processMap_["drop_partition_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_drop_partition_with_environment_context;
          processMap_["drop_partition_by_name"] = &ThriftHiveMetastoreProcessor::process_drop_partition_by_name;
    + processMap_["drop_partition_by_name_with_environment_context"] = &ThriftHiveMetastoreProcessor::process_drop_partition_by_name_with_environment_context;
          processMap_["get_partition"] = &ThriftHiveMetastoreProcessor::process_get_partition;
          processMap_["get_partition_with_auth"] = &ThriftHiveMetastoreProcessor::process_get_partition_with_auth;
          processMap_["get_partition_by_name"] = &ThriftHiveMetastoreProcessor::process_get_partition_by_name;
    @@ -11528,6 +12378,15 @@ class ThriftHiveMetastoreMultiface : vir
          ifaces_[i]->drop_table(dbname, name, deleteData);
        }

    + void drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context) {
    + size_t sz = ifaces_.size();
    + size_t i = 0;
    + for (; i < (sz - 1); ++i) {
    + ifaces_[i]->drop_table_with_environment_context(dbname, name, deleteData, environment_context);
    + }
    + ifaces_[i]->drop_table_with_environment_context(dbname, name, deleteData, environment_context);
    + }
    +
        void get_tables(std::vector<std::string> & _return, const std::string& db_name, const std::string& pattern) {
          size_t sz = ifaces_.size();
          size_t i = 0;
    @@ -11635,6 +12494,16 @@ class ThriftHiveMetastoreMultiface : vir
          return;
        }

    + void append_partition_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const EnvironmentContext& environment_context) {
    + size_t sz = ifaces_.size();
    + size_t i = 0;
    + for (; i < (sz - 1); ++i) {
    + ifaces_[i]->append_partition_with_environment_context(_return, db_name, tbl_name, part_vals, environment_context);
    + }
    + ifaces_[i]->append_partition_with_environment_context(_return, db_name, tbl_name, part_vals, environment_context);
    + return;
    + }
    +
        void append_partition_by_name(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name) {
          size_t sz = ifaces_.size();
          size_t i = 0;
    @@ -11645,6 +12514,16 @@ class ThriftHiveMetastoreMultiface : vir
          return;
        }

    + void append_partition_by_name_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const EnvironmentContext& environment_context) {
    + size_t sz = ifaces_.size();
    + size_t i = 0;
    + for (; i < (sz - 1); ++i) {
    + ifaces_[i]->append_partition_by_name_with_environment_context(_return, db_name, tbl_name, part_name, environment_context);
    + }
    + ifaces_[i]->append_partition_by_name_with_environment_context(_return, db_name, tbl_name, part_name, environment_context);
    + return;
    + }
    +
        bool drop_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData) {
          size_t sz = ifaces_.size();
          size_t i = 0;
    @@ -11654,6 +12533,15 @@ class ThriftHiveMetastoreMultiface : vir
          return ifaces_[i]->drop_partition(db_name, tbl_name, part_vals, deleteData);
        }

    + bool drop_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData, const EnvironmentContext& environment_context) {
    + size_t sz = ifaces_.size();
    + size_t i = 0;
    + for (; i < (sz - 1); ++i) {
    + ifaces_[i]->drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context);
    + }
    + return ifaces_[i]->drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context);
    + }
    +
        bool drop_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData) {
          size_t sz = ifaces_.size();
          size_t i = 0;
    @@ -11663,6 +12551,15 @@ class ThriftHiveMetastoreMultiface : vir
          return ifaces_[i]->drop_partition_by_name(db_name, tbl_name, part_name, deleteData);
        }

    + bool drop_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData, const EnvironmentContext& environment_context) {
    + size_t sz = ifaces_.size();
    + size_t i = 0;
    + for (; i < (sz - 1); ++i) {
    + ifaces_[i]->drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context);
    + }
    + return ifaces_[i]->drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context);
    + }
    +
        void get_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals) {
          size_t sz = ifaces_.size();
          size_t i = 0;

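For reference, a minimal sketch of how a C++ caller could exercise the new drop_table_with_environment_context client method declared in the ThriftHiveMetastore.h changes above. Only the method signature is taken from this diff; the include paths, the Apache::Hadoop::Hive namespace, the __set_properties setter on EnvironmentContext, the port number, and the property key are assumptions based on typical Thrift C++ codegen and Hive defaults, not on this patch.

    // Hedged sketch: call the new drop_table_with_environment_context client method.
    // Only the method signature comes from the diff above; the include paths, the
    // generated namespace, the __set_properties setter, and the port/property
    // values below are assumptions.
    #include <map>
    #include <string>
    #include <boost/shared_ptr.hpp>
    #include <thrift/transport/TSocket.h>
    #include <thrift/transport/TTransport.h>
    #include <thrift/transport/TBufferTransports.h>
    #include <thrift/protocol/TBinaryProtocol.h>
    #include "ThriftHiveMetastore.h"  // generated header modified by this patch

    using namespace apache::thrift::transport;
    using namespace apache::thrift::protocol;
    using namespace Apache::Hadoop::Hive;  // assumed cpp namespace of hive_metastore.thrift

    int main() {
      boost::shared_ptr<TSocket> socket(new TSocket("localhost", 9083));       // assumed metastore port
      boost::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
      boost::shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));
      ThriftHiveMetastoreClient client(protocol);

      transport->open();

      // EnvironmentContext carries a caller-supplied map<string,string> of properties
      // that the metastore can hand to its listeners along with the operation.
      EnvironmentContext ctx;
      std::map<std::string, std::string> props;
      props["hive.metastore.example"] = "true";  // hypothetical property key
      ctx.__set_properties(props);               // assumed generated setter

      // Signature matches the declaration added to ThriftHiveMetastoreClient above.
      client.drop_table_with_environment_context("default", "example_table",
                                                  /*deleteData=*/true, ctx);

      transport->close();
      return 0;
    }
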
    Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore_server.skeleton.cpp Tue Feb 12 18:52:55 2013
    @@ -97,6 +97,11 @@ class ThriftHiveMetastoreHandler : virtu
          printf("drop_table\n");
        }

    + void drop_table_with_environment_context(const std::string& dbname, const std::string& name, const bool deleteData, const EnvironmentContext& environment_context) {
    + // Your implementation goes here
    + printf("drop_table_with_environment_context\n");
    + }
    +
        void get_tables(std::vector<std::string> & _return, const std::string& db_name, const std::string& pattern) {
          // Your implementation goes here
          printf("get_tables\n");
    @@ -152,21 +157,41 @@ class ThriftHiveMetastoreHandler : virtu
          printf("append_partition\n");
        }

    + void append_partition_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const EnvironmentContext& environment_context) {
    + // Your implementation goes here
    + printf("append_partition_with_environment_context\n");
    + }
    +
        void append_partition_by_name(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name) {
          // Your implementation goes here
          printf("append_partition_by_name\n");
        }

    + void append_partition_by_name_with_environment_context(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const EnvironmentContext& environment_context) {
    + // Your implementation goes here
    + printf("append_partition_by_name_with_environment_context\n");
    + }
    +
        bool drop_partition(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData) {
          // Your implementation goes here
          printf("drop_partition\n");
        }

    + bool drop_partition_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals, const bool deleteData, const EnvironmentContext& environment_context) {
    + // Your implementation goes here
    + printf("drop_partition_with_environment_context\n");
    + }
    +
        bool drop_partition_by_name(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData) {
          // Your implementation goes here
          printf("drop_partition_by_name\n");
        }

    + bool drop_partition_by_name_with_environment_context(const std::string& db_name, const std::string& tbl_name, const std::string& part_name, const bool deleteData, const EnvironmentContext& environment_context) {
    + // Your implementation goes here
    + printf("drop_partition_by_name_with_environment_context\n");
    + }
    +
        void get_partition(Partition& _return, const std::string& db_name, const std::string& tbl_name, const std::vector<std::string> & part_vals) {
          // Your implementation goes here
          printf("get_partition\n");

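Purely to illustrate the shape of the new argument, the stub added to the generated server skeleton above could be fleshed out along the following lines. This is a sketch only: the skeleton is a generated placeholder, and the property key and logging shown here are hypothetical; it assumes the public properties member on the generated EnvironmentContext struct and that <map>, <string>, and printf are available through the skeleton's existing includes.

    // Hedged sketch of a handler body for the generated stub above, inside
    // ThriftHiveMetastoreHandler. The property key is hypothetical; the generated
    // stub itself only prints the method name.
    void drop_table_with_environment_context(const std::string& dbname,
                                             const std::string& name,
                                             const bool deleteData,
                                             const EnvironmentContext& environment_context) {
      // The generated EnvironmentContext struct is assumed to expose a public
      // map<string, string> 'properties' member holding caller-supplied context.
      std::map<std::string, std::string>::const_iterator it =
          environment_context.properties.find("hive.metastore.example");  // hypothetical key
      if (it != environment_context.properties.end()) {
        printf("drop_table_with_environment_context: %s.%s (%s=%s)\n",
               dbname.c_str(), name.c_str(), it->first.c_str(), it->second.c_str());
      } else {
        printf("drop_table_with_environment_context: %s.%s\n",
               dbname.c_str(), name.c_str());
      }
      (void) deleteData;  // deleteData would control whether table data is removed as well
    }
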
    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java Tue Feb 12 18:52:55 2013
    @@ -708,7 +708,7 @@ public class Database implements org.apa
                      for (int _i79 = 0; _i79 < _map78.size; ++_i79)
                      {
                        String _key80; // required
    - String _val81; // optional
    + String _val81; // required
                        _key80 = iprot.readString();
                        _val81 = iprot.readString();
                        struct.parameters.put(_key80, _val81);
    @@ -858,7 +858,7 @@ public class Database implements org.apa
                for (int _i85 = 0; _i85 < _map84.size; ++_i85)
                {
                  String _key86; // required
    - String _val87; // optional
    + String _val87; // required
                  _key86 = iprot.readString();
                  _val87 = iprot.readString();
                  struct.parameters.put(_key86, _val87);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java Tue Feb 12 18:52:55 2013
    @@ -356,7 +356,7 @@ public class EnvironmentContext implemen
                      for (int _i247 = 0; _i247 < _map246.size; ++_i247)
                      {
                        String _key248; // required
    - String _val249; // optional
    + String _val249; // required
                        _key248 = iprot.readString();
                        _val249 = iprot.readString();
                        struct.properties.put(_key248, _val249);
    @@ -439,7 +439,7 @@ public class EnvironmentContext implemen
                for (int _i253 = 0; _i253 < _map252.size; ++_i253)
                {
                  String _key254; // required
    - String _val255; // optional
    + String _val255; // required
                  _key254 = iprot.readString();
                  _val255 = iprot.readString();
                  struct.properties.put(_key254, _val255);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java Tue Feb 12 18:52:55 2013
    @@ -1145,7 +1145,7 @@ public class Index implements org.apache
                      for (int _i211 = 0; _i211 < _map210.size; ++_i211)
                      {
                        String _key212; // required
    - String _val213; // optional
    + String _val213; // required
                        _key212 = iprot.readString();
                        _val213 = iprot.readString();
                        struct.parameters.put(_key212, _val213);
    @@ -1362,7 +1362,7 @@ public class Index implements org.apache
                for (int _i217 = 0; _i217 < _map216.size; ++_i217)
                {
                  String _key218; // required
    - String _val219; // optional
    + String _val219; // required
                  _key218 = iprot.readString();
                  _val219 = iprot.readString();
                  struct.parameters.put(_key218, _val219);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java Tue Feb 12 18:52:55 2013
    @@ -1005,7 +1005,7 @@ public class Partition implements org.ap
                      for (int _i196 = 0; _i196 < _map195.size; ++_i196)
                      {
                        String _key197; // required
    - String _val198; // optional
    + String _val198; // required
                        _key197 = iprot.readString();
                        _val198 = iprot.readString();
                        struct.parameters.put(_key197, _val198);
    @@ -1219,7 +1219,7 @@ public class Partition implements org.ap
                for (int _i207 = 0; _i207 < _map206.size; ++_i207)
                {
                  String _key208; // required
    - String _val209; // optional
    + String _val209; // required
                  _key208 = iprot.readString();
                  _val209 = iprot.readString();
                  struct.parameters.put(_key208, _val209);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java Tue Feb 12 18:52:55 2013
    @@ -580,7 +580,7 @@ public class PrincipalPrivilegeSet imple
                      for (int _i25 = 0; _i25 < _map24.size; ++_i25)
                      {
                        String _key26; // required
    - List<PrivilegeGrantInfo> _val27; // optional
    + List<PrivilegeGrantInfo> _val27; // required
                        _key26 = iprot.readString();
                        {
                          org.apache.thrift.protocol.TList _list28 = iprot.readListBegin();
    @@ -611,7 +611,7 @@ public class PrincipalPrivilegeSet imple
                      for (int _i32 = 0; _i32 < _map31.size; ++_i32)
                      {
                        String _key33; // required
    - List<PrivilegeGrantInfo> _val34; // optional
    + List<PrivilegeGrantInfo> _val34; // required
                        _key33 = iprot.readString();
                        {
                          org.apache.thrift.protocol.TList _list35 = iprot.readListBegin();
    @@ -642,7 +642,7 @@ public class PrincipalPrivilegeSet imple
                      for (int _i39 = 0; _i39 < _map38.size; ++_i39)
                      {
                        String _key40; // required
    - List<PrivilegeGrantInfo> _val41; // optional
    + List<PrivilegeGrantInfo> _val41; // required
                        _key40 = iprot.readString();
                        {
                          org.apache.thrift.protocol.TList _list42 = iprot.readListBegin();
    @@ -827,7 +827,7 @@ public class PrincipalPrivilegeSet imple
                for (int _i58 = 0; _i58 < _map57.size; ++_i58)
                {
                  String _key59; // required
    - List<PrivilegeGrantInfo> _val60; // optional
    + List<PrivilegeGrantInfo> _val60; // required
                  _key59 = iprot.readString();
                  {
                    org.apache.thrift.protocol.TList _list61 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
    @@ -852,7 +852,7 @@ public class PrincipalPrivilegeSet imple
                for (int _i65 = 0; _i65 < _map64.size; ++_i65)
                {
                  String _key66; // required
    - List<PrivilegeGrantInfo> _val67; // optional
    + List<PrivilegeGrantInfo> _val67; // required
                  _key66 = iprot.readString();
                  {
                    org.apache.thrift.protocol.TList _list68 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
    @@ -877,7 +877,7 @@ public class PrincipalPrivilegeSet imple
                for (int _i72 = 0; _i72 < _map71.size; ++_i72)
                {
                  String _key73; // required
    - List<PrivilegeGrantInfo> _val74; // optional
    + List<PrivilegeGrantInfo> _val74; // required
                  _key73 = iprot.readString();
                  {
                    org.apache.thrift.protocol.TList _list75 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java Tue Feb 12 18:52:55 2013
    @@ -476,7 +476,7 @@ public class Schema implements org.apach
                      for (int _i232 = 0; _i232 < _map231.size; ++_i232)
                      {
                        String _key233; // required
    - String _val234; // optional
    + String _val234; // required
                        _key233 = iprot.readString();
                        _val234 = iprot.readString();
                        struct.properties.put(_key233, _val234);
    @@ -597,7 +597,7 @@ public class Schema implements org.apach
                for (int _i243 = 0; _i243 < _map242.size; ++_i243)
                {
                  String _key244; // required
    - String _val245; // optional
    + String _val245; // required
                  _key244 = iprot.readString();
                  _val245 = iprot.readString();
                  struct.properties.put(_key244, _val245);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java Tue Feb 12 18:52:55 2013
    @@ -534,7 +534,7 @@ public class SerDeInfo implements org.ap
                      for (int _i89 = 0; _i89 < _map88.size; ++_i89)
                      {
                        String _key90; // required
    - String _val91; // optional
    + String _val91; // required
                        _key90 = iprot.readString();
                        _val91 = iprot.readString();
                        struct.parameters.put(_key90, _val91);
    @@ -647,7 +647,7 @@ public class SerDeInfo implements org.ap
                for (int _i95 = 0; _i95 < _map94.size; ++_i95)
                {
                  String _key96; // required
    - String _val97; // optional
    + String _val97; // required
                  _key96 = iprot.readString();
                  _val97 = iprot.readString();
                  struct.parameters.put(_key96, _val97);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SkewedInfo.java Tue Feb 12 18:52:55 2013
    @@ -613,7 +613,7 @@ public class SkewedInfo implements org.a
                      for (int _i108 = 0; _i108 < _map107.size; ++_i108)
                      {
                        List<String> _key109; // required
    - String _val110; // optional
    + String _val110; // required
                        {
                          org.apache.thrift.protocol.TList _list111 = iprot.readListBegin();
                          _key109 = new ArrayList<String>(_list111.size);
    @@ -815,7 +815,7 @@ public class SkewedInfo implements org.a
                for (int _i134 = 0; _i134 < _map133.size; ++_i134)
                {
                  List<String> _key135; // required
    - String _val136; // optional
    + String _val136; // required
                  {
                    org.apache.thrift.protocol.TList _list137 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
                    _key135 = new ArrayList<String>(_list137.size);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java Tue Feb 12 18:52:55 2013
    @@ -1410,7 +1410,7 @@ public class StorageDescriptor implement
                      for (int _i150 = 0; _i150 < _map149.size; ++_i150)
                      {
                        String _key151; // required
    - String _val152; // optional
    + String _val152; // required
                        _key151 = iprot.readString();
                        _val152 = iprot.readString();
                        struct.parameters.put(_key151, _val152);
    @@ -1734,7 +1734,7 @@ public class StorageDescriptor implement
                for (int _i171 = 0; _i171 < _map170.size; ++_i171)
                {
                  String _key172; // required
    - String _val173; // optional
    + String _val173; // required
                  _key172 = iprot.readString();
                  _val173 = iprot.readString();
                  struct.parameters.put(_key172, _val173);

    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java Tue Feb 12 18:52:55 2013
    @@ -1423,7 +1423,7 @@ public class Table implements org.apache
                      for (int _i178 = 0; _i178 < _map177.size; ++_i178)
                      {
                        String _key179; // required
    - String _val180; // optional
    + String _val180; // required
                        _key179 = iprot.readString();
                        _val180 = iprot.readString();
                        struct.parameters.put(_key179, _val180);
    @@ -1723,7 +1723,7 @@ public class Table implements org.apache
                for (int _i189 = 0; _i189 < _map188.size; ++_i189)
                {
                  String _key190; // required
    - String _val191; // optional
    + String _val191; // required
                  _key190 = iprot.readString();
                  _val191 = iprot.readString();
                  struct.parameters.put(_key190, _val191);
    Modified: hive/trunk/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php Tue Feb 12 18:52:55 2013
    @@ -31,6 +31,7 @@ interface ThriftHiveMetastoreIf extends
        public function create_table(\metastore\Table $tbl);
        public function create_table_with_environment_context(\metastore\Table $tbl, \metastore\EnvironmentContext $environment_context);
        public function drop_table($dbname, $name, $deleteData);
    + public function drop_table_with_environment_context($dbname, $name, $deleteData, \metastore\EnvironmentContext $environment_context);
        public function get_tables($db_name, $pattern);
        public function get_all_tables($db_name);
        public function get_table($dbname, $tbl_name);
    @@ -42,9 +43,13 @@ interface ThriftHiveMetastoreIf extends
        public function add_partition_with_environment_context(\metastore\Partition $new_part, \metastore\EnvironmentContext $environment_context);
        public function add_partitions($new_parts);
        public function append_partition($db_name, $tbl_name, $part_vals);
    + public function append_partition_with_environment_context($db_name, $tbl_name, $part_vals, \metastore\EnvironmentContext $environment_context);
        public function append_partition_by_name($db_name, $tbl_name, $part_name);
    + public function append_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, \metastore\EnvironmentContext $environment_context);
        public function drop_partition($db_name, $tbl_name, $part_vals, $deleteData);
    + public function drop_partition_with_environment_context($db_name, $tbl_name, $part_vals, $deleteData, \metastore\EnvironmentContext $environment_context);
        public function drop_partition_by_name($db_name, $tbl_name, $part_name, $deleteData);
    + public function drop_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, $deleteData, \metastore\EnvironmentContext $environment_context);
        public function get_partition($db_name, $tbl_name, $part_vals);
        public function get_partition_with_auth($db_name, $tbl_name, $part_vals, $user_name, $group_names);
        public function get_partition_by_name($db_name, $tbl_name, $part_name);
    @@ -960,6 +965,63 @@ class ThriftHiveMetastoreClient extends
          return;
        }

    + public function drop_table_with_environment_context($dbname, $name, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $this->send_drop_table_with_environment_context($dbname, $name, $deleteData, $environment_context);
    + $this->recv_drop_table_with_environment_context();
    + }
    +
    + public function send_drop_table_with_environment_context($dbname, $name, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $args = new \metastore\ThriftHiveMetastore_drop_table_with_environment_context_args();
    + $args->dbname = $dbname;
    + $args->name = $name;
    + $args->deleteData = $deleteData;
    + $args->environment_context = $environment_context;
    + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
    + if ($bin_accel)
    + {
    + thrift_protocol_write_binary($this->output_, 'drop_table_with_environment_context', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
    + }
    + else
    + {
    + $this->output_->writeMessageBegin('drop_table_with_environment_context', TMessageType::CALL, $this->seqid_);
    + $args->write($this->output_);
    + $this->output_->writeMessageEnd();
    + $this->output_->getTransport()->flush();
    + }
    + }
    +
    + public function recv_drop_table_with_environment_context()
    + {
    + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
    + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_drop_table_with_environment_context_result', $this->input_->isStrictRead());
    + else
    + {
    + $rseqid = 0;
    + $fname = null;
    + $mtype = 0;
    +
    + $this->input_->readMessageBegin($fname, $mtype, $rseqid);
    + if ($mtype == TMessageType::EXCEPTION) {
    + $x = new TApplicationException();
    + $x->read($this->input_);
    + $this->input_->readMessageEnd();
    + throw $x;
    + }
    + $result = new \metastore\ThriftHiveMetastore_drop_table_with_environment_context_result();
    + $result->read($this->input_);
    + $this->input_->readMessageEnd();
    + }
    + if ($result->o1 !== null) {
    + throw $result->o1;
    + }
    + if ($result->o3 !== null) {
    + throw $result->o3;
    + }
    + return;
    + }
    +
        public function get_tables($db_name, $pattern)
        {
          $this->send_get_tables($db_name, $pattern);
    @@ -1606,6 +1668,69 @@ class ThriftHiveMetastoreClient extends
          throw new \Exception("append_partition failed: unknown result");
        }

    + public function append_partition_with_environment_context($db_name, $tbl_name, $part_vals, \metastore\EnvironmentContext $environment_context)
    + {
    + $this->send_append_partition_with_environment_context($db_name, $tbl_name, $part_vals, $environment_context);
    + return $this->recv_append_partition_with_environment_context();
    + }
    +
    + public function send_append_partition_with_environment_context($db_name, $tbl_name, $part_vals, \metastore\EnvironmentContext $environment_context)
    + {
    + $args = new \metastore\ThriftHiveMetastore_append_partition_with_environment_context_args();
    + $args->db_name = $db_name;
    + $args->tbl_name = $tbl_name;
    + $args->part_vals = $part_vals;
    + $args->environment_context = $environment_context;
    + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
    + if ($bin_accel)
    + {
    + thrift_protocol_write_binary($this->output_, 'append_partition_with_environment_context', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
    + }
    + else
    + {
    + $this->output_->writeMessageBegin('append_partition_with_environment_context', TMessageType::CALL, $this->seqid_);
    + $args->write($this->output_);
    + $this->output_->writeMessageEnd();
    + $this->output_->getTransport()->flush();
    + }
    + }
    +
    + public function recv_append_partition_with_environment_context()
    + {
    + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
    + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_append_partition_with_environment_context_result', $this->input_->isStrictRead());
    + else
    + {
    + $rseqid = 0;
    + $fname = null;
    + $mtype = 0;
    +
    + $this->input_->readMessageBegin($fname, $mtype, $rseqid);
    + if ($mtype == TMessageType::EXCEPTION) {
    + $x = new TApplicationException();
    + $x->read($this->input_);
    + $this->input_->readMessageEnd();
    + throw $x;
    + }
    + $result = new \metastore\ThriftHiveMetastore_append_partition_with_environment_context_result();
    + $result->read($this->input_);
    + $this->input_->readMessageEnd();
    + }
    + if ($result->success !== null) {
    + return $result->success;
    + }
    + if ($result->o1 !== null) {
    + throw $result->o1;
    + }
    + if ($result->o2 !== null) {
    + throw $result->o2;
    + }
    + if ($result->o3 !== null) {
    + throw $result->o3;
    + }
    + throw new \Exception("append_partition_with_environment_context failed: unknown result");
    + }
    +
        public function append_partition_by_name($db_name, $tbl_name, $part_name)
        {
          $this->send_append_partition_by_name($db_name, $tbl_name, $part_name);
    @@ -1668,6 +1793,69 @@ class ThriftHiveMetastoreClient extends
          throw new \Exception("append_partition_by_name failed: unknown result");
        }

    + public function append_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, \metastore\EnvironmentContext $environment_context)
    + {
    + $this->send_append_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, $environment_context);
    + return $this->recv_append_partition_by_name_with_environment_context();
    + }
    +
    + public function send_append_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, \metastore\EnvironmentContext $environment_context)
    + {
    + $args = new \metastore\ThriftHiveMetastore_append_partition_by_name_with_environment_context_args();
    + $args->db_name = $db_name;
    + $args->tbl_name = $tbl_name;
    + $args->part_name = $part_name;
    + $args->environment_context = $environment_context;
    + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
    + if ($bin_accel)
    + {
    + thrift_protocol_write_binary($this->output_, 'append_partition_by_name_with_environment_context', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
    + }
    + else
    + {
    + $this->output_->writeMessageBegin('append_partition_by_name_with_environment_context', TMessageType::CALL, $this->seqid_);
    + $args->write($this->output_);
    + $this->output_->writeMessageEnd();
    + $this->output_->getTransport()->flush();
    + }
    + }
    +
    + public function recv_append_partition_by_name_with_environment_context()
    + {
    + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
    + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_append_partition_by_name_with_environment_context_result', $this->input_->isStrictRead());
    + else
    + {
    + $rseqid = 0;
    + $fname = null;
    + $mtype = 0;
    +
    + $this->input_->readMessageBegin($fname, $mtype, $rseqid);
    + if ($mtype == TMessageType::EXCEPTION) {
    + $x = new TApplicationException();
    + $x->read($this->input_);
    + $this->input_->readMessageEnd();
    + throw $x;
    + }
    + $result = new \metastore\ThriftHiveMetastore_append_partition_by_name_with_environment_context_result();
    + $result->read($this->input_);
    + $this->input_->readMessageEnd();
    + }
    + if ($result->success !== null) {
    + return $result->success;
    + }
    + if ($result->o1 !== null) {
    + throw $result->o1;
    + }
    + if ($result->o2 !== null) {
    + throw $result->o2;
    + }
    + if ($result->o3 !== null) {
    + throw $result->o3;
    + }
    + throw new \Exception("append_partition_by_name_with_environment_context failed: unknown result");
    + }
    +
        public function drop_partition($db_name, $tbl_name, $part_vals, $deleteData)
        {
          $this->send_drop_partition($db_name, $tbl_name, $part_vals, $deleteData);
    @@ -1728,6 +1916,67 @@ class ThriftHiveMetastoreClient extends
          throw new \Exception("drop_partition failed: unknown result");
        }

    + public function drop_partition_with_environment_context($db_name, $tbl_name, $part_vals, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $this->send_drop_partition_with_environment_context($db_name, $tbl_name, $part_vals, $deleteData, $environment_context);
    + return $this->recv_drop_partition_with_environment_context();
    + }
    +
    + public function send_drop_partition_with_environment_context($db_name, $tbl_name, $part_vals, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $args = new \metastore\ThriftHiveMetastore_drop_partition_with_environment_context_args();
    + $args->db_name = $db_name;
    + $args->tbl_name = $tbl_name;
    + $args->part_vals = $part_vals;
    + $args->deleteData = $deleteData;
    + $args->environment_context = $environment_context;
    + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
    + if ($bin_accel)
    + {
    + thrift_protocol_write_binary($this->output_, 'drop_partition_with_environment_context', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
    + }
    + else
    + {
    + $this->output_->writeMessageBegin('drop_partition_with_environment_context', TMessageType::CALL, $this->seqid_);
    + $args->write($this->output_);
    + $this->output_->writeMessageEnd();
    + $this->output_->getTransport()->flush();
    + }
    + }
    +
    + public function recv_drop_partition_with_environment_context()
    + {
    + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
    + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_drop_partition_with_environment_context_result', $this->input_->isStrictRead());
    + else
    + {
    + $rseqid = 0;
    + $fname = null;
    + $mtype = 0;
    +
    + $this->input_->readMessageBegin($fname, $mtype, $rseqid);
    + if ($mtype == TMessageType::EXCEPTION) {
    + $x = new TApplicationException();
    + $x->read($this->input_);
    + $this->input_->readMessageEnd();
    + throw $x;
    + }
    + $result = new \metastore\ThriftHiveMetastore_drop_partition_with_environment_context_result();
    + $result->read($this->input_);
    + $this->input_->readMessageEnd();
    + }
    + if ($result->success !== null) {
    + return $result->success;
    + }
    + if ($result->o1 !== null) {
    + throw $result->o1;
    + }
    + if ($result->o2 !== null) {
    + throw $result->o2;
    + }
    + throw new \Exception("drop_partition_with_environment_context failed: unknown result");
    + }
    +
        public function drop_partition_by_name($db_name, $tbl_name, $part_name, $deleteData)
        {
          $this->send_drop_partition_by_name($db_name, $tbl_name, $part_name, $deleteData);
    @@ -1788,6 +2037,67 @@ class ThriftHiveMetastoreClient extends
          throw new \Exception("drop_partition_by_name failed: unknown result");
        }

    + public function drop_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $this->send_drop_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, $deleteData, $environment_context);
    + return $this->recv_drop_partition_by_name_with_environment_context();
    + }
    +
    + public function send_drop_partition_by_name_with_environment_context($db_name, $tbl_name, $part_name, $deleteData, \metastore\EnvironmentContext $environment_context)
    + {
    + $args = new \metastore\ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args();
    + $args->db_name = $db_name;
    + $args->tbl_name = $tbl_name;
    + $args->part_name = $part_name;
    + $args->deleteData = $deleteData;
    + $args->environment_context = $environment_context;
    + $bin_accel = ($this->output_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_write_binary');
    + if ($bin_accel)
    + {
    + thrift_protocol_write_binary($this->output_, 'drop_partition_by_name_with_environment_context', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
    + }
    + else
    + {
    + $this->output_->writeMessageBegin('drop_partition_by_name_with_environment_context', TMessageType::CALL, $this->seqid_);
    + $args->write($this->output_);
    + $this->output_->writeMessageEnd();
    + $this->output_->getTransport()->flush();
    + }
    + }
    +
    + public function recv_drop_partition_by_name_with_environment_context()
    + {
    + $bin_accel = ($this->input_ instanceof TProtocol::$TBINARYPROTOCOLACCELERATED) && function_exists('thrift_protocol_read_binary');
    + if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result', $this->input_->isStrictRead());
    + else
    + {
    + $rseqid = 0;
    + $fname = null;
    + $mtype = 0;
    +
    + $this->input_->readMessageBegin($fname, $mtype, $rseqid);
    + if ($mtype == TMessageType::EXCEPTION) {
    + $x = new TApplicationException();
    + $x->read($this->input_);
    + $this->input_->readMessageEnd();
    + throw $x;
    + }
    + $result = new \metastore\ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result();
    + $result->read($this->input_);
    + $this->input_->readMessageEnd();
    + }
    + if ($result->success !== null) {
    + return $result->success;
    + }
    + if ($result->o1 !== null) {
    + throw $result->o1;
    + }
    + if ($result->o2 !== null) {
    + throw $result->o2;
    + }
    + throw new \Exception("drop_partition_by_name_with_environment_context failed: unknown result");
    + }
    +
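As with the other generated calls, each new method is a thin synchronous wrapper: send_* serializes the args struct as a TMessageType::CALL frame, and recv_* either rethrows a server-side exception message or maps the populated result field to a return value or a typed throw. A hedged sketch of handling the drop-by-name variant, reusing $client and $ctx from the sketch above and assuming its exception mapping mirrors the existing drop_partition_by_name call:

    // Sketch only: o1/o2 below refer to fields of the generated result struct.
    try {
        $deleted = $client->drop_partition_by_name_with_environment_context(
            'default', 'my_table', 'ds=2013-02-12', true /* deleteData */, $ctx);
        // $deleted is the boolean 'success' field of the result struct
    } catch (\metastore\NoSuchObjectException $e) {
        // o1: the partition or table does not exist
    } catch (\metastore\MetaException $e) {
        // o2: generic metastore failure
    } catch (\Exception $e) {
        // protocol-level failure, or the "unknown result" case thrown by recv_*
    }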
        public function get_partition($db_name, $tbl_name, $part_vals)
        {
          $this->send_get_partition($db_name, $tbl_name, $part_vals);
    @@ -7638,13 +7948,246 @@ class ThriftHiveMetastore_drop_table_res

      }

    -class ThriftHiveMetastore_get_tables_args {
    +class ThriftHiveMetastore_drop_table_with_environment_context_args {
        static $_TSPEC;

    - public $db_name = null;
    - public $pattern = null;
    -
    - public function __construct($vals=null) {
    + public $dbname = null;
    + public $name = null;
    + public $deleteData = null;
    + public $environment_context = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'dbname',
    + 'type' => TType::STRING,
    + ),
    + 2 => array(
    + 'var' => 'name',
    + 'type' => TType::STRING,
    + ),
    + 3 => array(
    + 'var' => 'deleteData',
    + 'type' => TType::BOOL,
    + ),
    + 4 => array(
    + 'var' => 'environment_context',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\EnvironmentContext',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['dbname'])) {
    + $this->dbname = $vals['dbname'];
    + }
    + if (isset($vals['name'])) {
    + $this->name = $vals['name'];
    + }
    + if (isset($vals['deleteData'])) {
    + $this->deleteData = $vals['deleteData'];
    + }
    + if (isset($vals['environment_context'])) {
    + $this->environment_context = $vals['environment_context'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_drop_table_with_environment_context_args';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->dbname);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::BOOL) {
    + $xfer += $input->readBool($this->deleteData);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::STRUCT) {
    + $this->environment_context = new \metastore\EnvironmentContext();
    + $xfer += $this->environment_context->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_table_with_environment_context_args');
    + if ($this->dbname !== null) {
    + $xfer += $output->writeFieldBegin('dbname', TType::STRING, 1);
    + $xfer += $output->writeString($this->dbname);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->name !== null) {
    + $xfer += $output->writeFieldBegin('name', TType::STRING, 2);
    + $xfer += $output->writeString($this->name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->deleteData !== null) {
    + $xfer += $output->writeFieldBegin('deleteData', TType::BOOL, 3);
    + $xfer += $output->writeBool($this->deleteData);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->environment_context !== null) {
    + if (!is_object($this->environment_context)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('environment_context', TType::STRUCT, 4);
    + $xfer += $this->environment_context->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_drop_table_with_environment_context_result {
    + static $_TSPEC;
    +
    + public $o1 = null;
    + public $o3 = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'o1',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\NoSuchObjectException',
    + ),
    + 2 => array(
    + 'var' => 'o3',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\MetaException',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['o1'])) {
    + $this->o1 = $vals['o1'];
    + }
    + if (isset($vals['o3'])) {
    + $this->o3 = $vals['o3'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_drop_table_with_environment_context_result';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->o1 = new \metastore\NoSuchObjectException();
    + $xfer += $this->o1->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->o3 = new \metastore\MetaException();
    + $xfer += $this->o3->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_table_with_environment_context_result');
    + if ($this->o1 !== null) {
    + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
    + $xfer += $this->o1->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o3 !== null) {
    + $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 2);
    + $xfer += $this->o3->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
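The args struct above is the piece the client-side send_* helper fills in: its $_TSPEC field ids 1-4 (dbname, name, deleteData, environment_context) match the new drop_table_with_environment_context declaration in hive_metastore.thrift, and write() emits only the fields that are non-null. A small sketch of constructing it directly from an associative array, which the generated constructor supports; the literal values and the $protocol handle are assumptions carried over from the first sketch:

    // Sketch only: array keys are the Thrift field names declared in $_TSPEC.
    $args = new \metastore\ThriftHiveMetastore_drop_table_with_environment_context_args(array(
        'dbname'              => 'default',
        'name'                => 'my_table',
        'deleteData'          => true,
        'environment_context' => $ctx,    // \metastore\EnvironmentContext instance
    ));
    $args->write($protocol);              // serializes the struct onto the output protocol;
                                          // null fields (e.g. a null context) are simply omitted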
    +class ThriftHiveMetastore_get_tables_args {
    + static $_TSPEC;
    +
    + public $db_name = null;
    + public $pattern = null;
    +
    + public function __construct($vals=null) {
          if (!isset(self::$_TSPEC)) {
            self::$_TSPEC = array(
              1 => array(
    @@ -10250,12 +10793,13 @@ class ThriftHiveMetastore_append_partiti

      }

    -class ThriftHiveMetastore_append_partition_by_name_args {
    +class ThriftHiveMetastore_append_partition_with_environment_context_args {
        static $_TSPEC;

        public $db_name = null;
        public $tbl_name = null;
    - public $part_name = null;
    + public $part_vals = null;
    + public $environment_context = null;

        public function __construct($vals=null) {
          if (!isset(self::$_TSPEC)) {
    @@ -10269,8 +10813,17 @@ class ThriftHiveMetastore_append_partiti
                'type' => TType::STRING,
                ),
              3 => array(
    - 'var' => 'part_name',
    - 'type' => TType::STRING,
    + 'var' => 'part_vals',
    + 'type' => TType::LST,
    + 'etype' => TType::STRING,
    + 'elem' => array(
    + 'type' => TType::STRING,
    + ),
    + ),
    + 4 => array(
    + 'var' => 'environment_context',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\EnvironmentContext',
                ),
              );
          }
    @@ -10281,8 +10834,304 @@ class ThriftHiveMetastore_append_partiti
            if (isset($vals['tbl_name'])) {
              $this->tbl_name = $vals['tbl_name'];
            }
    - if (isset($vals['part_name'])) {
    - $this->part_name = $vals['part_name'];
    + if (isset($vals['part_vals'])) {
    + $this->part_vals = $vals['part_vals'];
    + }
    + if (isset($vals['environment_context'])) {
    + $this->environment_context = $vals['environment_context'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_append_partition_with_environment_context_args';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->db_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->tbl_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::LST) {
    + $this->part_vals = array();
    + $_size313 = 0;
    + $_etype316 = 0;
    + $xfer += $input->readListBegin($_etype316, $_size313);
    + for ($_i317 = 0; $_i317 < $_size313; ++$_i317)
    + {
    + $elem318 = null;
    + $xfer += $input->readString($elem318);
    + $this->part_vals []= $elem318;
    + }
    + $xfer += $input->readListEnd();
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::STRUCT) {
    + $this->environment_context = new \metastore\EnvironmentContext();
    + $xfer += $this->environment_context->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_with_environment_context_args');
    + if ($this->db_name !== null) {
    + $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
    + $xfer += $output->writeString($this->db_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->tbl_name !== null) {
    + $xfer += $output->writeFieldBegin('tbl_name', TType::STRING, 2);
    + $xfer += $output->writeString($this->tbl_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->part_vals !== null) {
    + if (!is_array($this->part_vals)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('part_vals', TType::LST, 3);
    + {
    + $output->writeListBegin(TType::STRING, count($this->part_vals));
    + {
    + foreach ($this->part_vals as $iter319)
    + {
    + $xfer += $output->writeString($iter319);
    + }
    + }
    + $output->writeListEnd();
    + }
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->environment_context !== null) {
    + if (!is_object($this->environment_context)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('environment_context', TType::STRUCT, 4);
    + $xfer += $this->environment_context->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
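Note how part_vals is handled in the args struct above: in PHP it is an ordinary array of strings, and on the wire it becomes a Thrift list<string> (writeListBegin with the element count, then one writeString per value, mirrored by readListBegin on the way back). A sketch of the corresponding client call, again reusing $client and $ctx from the first sketch; the partition column layout is an assumption:

    // Sketch only: values are positional, one per partition column of the table.
    $partVals  = array('2013-02-12', 'US');   // e.g. (ds, country), assumed table schema
    $partition = $client->append_partition_with_environment_context(
        'default', 'my_table', $partVals, $ctx);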
    +class ThriftHiveMetastore_append_partition_with_environment_context_result {
    + static $_TSPEC;
    +
    + public $success = null;
    + public $o1 = null;
    + public $o2 = null;
    + public $o3 = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 0 => array(
    + 'var' => 'success',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\Partition',
    + ),
    + 1 => array(
    + 'var' => 'o1',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\InvalidObjectException',
    + ),
    + 2 => array(
    + 'var' => 'o2',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\AlreadyExistsException',
    + ),
    + 3 => array(
    + 'var' => 'o3',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\MetaException',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['success'])) {
    + $this->success = $vals['success'];
    + }
    + if (isset($vals['o1'])) {
    + $this->o1 = $vals['o1'];
    + }
    + if (isset($vals['o2'])) {
    + $this->o2 = $vals['o2'];
    + }
    + if (isset($vals['o3'])) {
    + $this->o3 = $vals['o3'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_append_partition_with_environment_context_result';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 0:
    + if ($ftype == TType::STRUCT) {
    + $this->success = new \metastore\Partition();
    + $xfer += $this->success->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->o1 = new \metastore\InvalidObjectException();
    + $xfer += $this->o1->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->o2 = new \metastore\AlreadyExistsException();
    + $xfer += $this->o2->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::STRUCT) {
    + $this->o3 = new \metastore\MetaException();
    + $xfer += $this->o3->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_with_environment_context_result');
    + if ($this->success !== null) {
    + if (!is_object($this->success)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0);
    + $xfer += $this->success->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o1 !== null) {
    + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
    + $xfer += $this->o1->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o2 !== null) {
    + $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
    + $xfer += $this->o2->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o3 !== null) {
    + $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
    + $xfer += $this->o3->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
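For the append_* variants the result struct carries the newly created \metastore\Partition in its 'success' field, while o1/o2/o3 map to InvalidObjectException, AlreadyExistsException, and MetaException; recv_* returns 'success' when it is set and otherwise throws whichever exception field the server populated. A hedged sketch of treating the already-exists case as a retry-safe outcome, reusing the variables from the previous sketch:

    // Sketch only: AlreadyExistsException (o2) is the common retry case for appends.
    try {
        $p = $client->append_partition_with_environment_context(
            'default', 'my_table', $partVals, $ctx);
        // $p is the \metastore\Partition describing the partition that was added
    } catch (\metastore\AlreadyExistsException $e) {
        // o2: the partition was already present; callers may treat this as idempotent
    }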
    +class ThriftHiveMetastore_append_partition_by_name_args {
    + static $_TSPEC;
    +
    + public $db_name = null;
    + public $tbl_name = null;
    + public $part_name = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'db_name',
    + 'type' => TType::STRING,
    + ),
    + 2 => array(
    + 'var' => 'tbl_name',
    + 'type' => TType::STRING,
    + ),
    + 3 => array(
    + 'var' => 'part_name',
    + 'type' => TType::STRING,
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['db_name'])) {
    + $this->db_name = $vals['db_name'];
    + }
    + if (isset($vals['tbl_name'])) {
    + $this->tbl_name = $vals['tbl_name'];
    + }
    + if (isset($vals['part_name'])) {
    + $this->part_name = $vals['part_name'];
            }
          }
        }
    @@ -10321,8 +11170,863 @@ class ThriftHiveMetastore_append_partiti
                }
                break;
              case 3:
    - if ($ftype == TType::STRING) {
    - $xfer += $input->readString($this->part_name);
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->part_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_args');
    + if ($this->db_name !== null) {
    + $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
    + $xfer += $output->writeString($this->db_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->tbl_name !== null) {
    + $xfer += $output->writeFieldBegin('tbl_name', TType::STRING, 2);
    + $xfer += $output->writeString($this->tbl_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->part_name !== null) {
    + $xfer += $output->writeFieldBegin('part_name', TType::STRING, 3);
    + $xfer += $output->writeString($this->part_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_append_partition_by_name_result {
    + static $_TSPEC;
    +
    + public $success = null;
    + public $o1 = null;
    + public $o2 = null;
    + public $o3 = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 0 => array(
    + 'var' => 'success',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\Partition',
    + ),
    + 1 => array(
    + 'var' => 'o1',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\InvalidObjectException',
    + ),
    + 2 => array(
    + 'var' => 'o2',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\AlreadyExistsException',
    + ),
    + 3 => array(
    + 'var' => 'o3',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\MetaException',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['success'])) {
    + $this->success = $vals['success'];
    + }
    + if (isset($vals['o1'])) {
    + $this->o1 = $vals['o1'];
    + }
    + if (isset($vals['o2'])) {
    + $this->o2 = $vals['o2'];
    + }
    + if (isset($vals['o3'])) {
    + $this->o3 = $vals['o3'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_append_partition_by_name_result';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 0:
    + if ($ftype == TType::STRUCT) {
    + $this->success = new \metastore\Partition();
    + $xfer += $this->success->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->o1 = new \metastore\InvalidObjectException();
    + $xfer += $this->o1->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->o2 = new \metastore\AlreadyExistsException();
    + $xfer += $this->o2->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::STRUCT) {
    + $this->o3 = new \metastore\MetaException();
    + $xfer += $this->o3->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_result');
    + if ($this->success !== null) {
    + if (!is_object($this->success)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0);
    + $xfer += $this->success->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o1 !== null) {
    + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
    + $xfer += $this->o1->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o2 !== null) {
    + $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
    + $xfer += $this->o2->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o3 !== null) {
    + $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
    + $xfer += $this->o3->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_args {
    + static $_TSPEC;
    +
    + public $db_name = null;
    + public $tbl_name = null;
    + public $part_name = null;
    + public $environment_context = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'db_name',
    + 'type' => TType::STRING,
    + ),
    + 2 => array(
    + 'var' => 'tbl_name',
    + 'type' => TType::STRING,
    + ),
    + 3 => array(
    + 'var' => 'part_name',
    + 'type' => TType::STRING,
    + ),
    + 4 => array(
    + 'var' => 'environment_context',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\EnvironmentContext',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['db_name'])) {
    + $this->db_name = $vals['db_name'];
    + }
    + if (isset($vals['tbl_name'])) {
    + $this->tbl_name = $vals['tbl_name'];
    + }
    + if (isset($vals['part_name'])) {
    + $this->part_name = $vals['part_name'];
    + }
    + if (isset($vals['environment_context'])) {
    + $this->environment_context = $vals['environment_context'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_append_partition_by_name_with_environment_context_args';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->db_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->tbl_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->part_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::STRUCT) {
    + $this->environment_context = new \metastore\EnvironmentContext();
    + $xfer += $this->environment_context->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_with_environment_context_args');
    + if ($this->db_name !== null) {
    + $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
    + $xfer += $output->writeString($this->db_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->tbl_name !== null) {
    + $xfer += $output->writeFieldBegin('tbl_name', TType::STRING, 2);
    + $xfer += $output->writeString($this->tbl_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->part_name !== null) {
    + $xfer += $output->writeFieldBegin('part_name', TType::STRING, 3);
    + $xfer += $output->writeString($this->part_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->environment_context !== null) {
    + if (!is_object($this->environment_context)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('environment_context', TType::STRUCT, 4);
    + $xfer += $this->environment_context->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_append_partition_by_name_with_environment_context_result {
    + static $_TSPEC;
    +
    + public $success = null;
    + public $o1 = null;
    + public $o2 = null;
    + public $o3 = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 0 => array(
    + 'var' => 'success',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\Partition',
    + ),
    + 1 => array(
    + 'var' => 'o1',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\InvalidObjectException',
    + ),
    + 2 => array(
    + 'var' => 'o2',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\AlreadyExistsException',
    + ),
    + 3 => array(
    + 'var' => 'o3',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\MetaException',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['success'])) {
    + $this->success = $vals['success'];
    + }
    + if (isset($vals['o1'])) {
    + $this->o1 = $vals['o1'];
    + }
    + if (isset($vals['o2'])) {
    + $this->o2 = $vals['o2'];
    + }
    + if (isset($vals['o3'])) {
    + $this->o3 = $vals['o3'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_append_partition_by_name_with_environment_context_result';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 0:
    + if ($ftype == TType::STRUCT) {
    + $this->success = new \metastore\Partition();
    + $xfer += $this->success->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->o1 = new \metastore\InvalidObjectException();
    + $xfer += $this->o1->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->o2 = new \metastore\AlreadyExistsException();
    + $xfer += $this->o2->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::STRUCT) {
    + $this->o3 = new \metastore\MetaException();
    + $xfer += $this->o3->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_with_environment_context_result');
    + if ($this->success !== null) {
    + if (!is_object($this->success)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0);
    + $xfer += $this->success->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o1 !== null) {
    + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
    + $xfer += $this->o1->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o2 !== null) {
    + $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
    + $xfer += $this->o2->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o3 !== null) {
    + $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
    + $xfer += $this->o3->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_drop_partition_args {
    + static $_TSPEC;
    +
    + public $db_name = null;
    + public $tbl_name = null;
    + public $part_vals = null;
    + public $deleteData = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'db_name',
    + 'type' => TType::STRING,
    + ),
    + 2 => array(
    + 'var' => 'tbl_name',
    + 'type' => TType::STRING,
    + ),
    + 3 => array(
    + 'var' => 'part_vals',
    + 'type' => TType::LST,
    + 'etype' => TType::STRING,
    + 'elem' => array(
    + 'type' => TType::STRING,
    + ),
    + ),
    + 4 => array(
    + 'var' => 'deleteData',
    + 'type' => TType::BOOL,
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['db_name'])) {
    + $this->db_name = $vals['db_name'];
    + }
    + if (isset($vals['tbl_name'])) {
    + $this->tbl_name = $vals['tbl_name'];
    + }
    + if (isset($vals['part_vals'])) {
    + $this->part_vals = $vals['part_vals'];
    + }
    + if (isset($vals['deleteData'])) {
    + $this->deleteData = $vals['deleteData'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_drop_partition_args';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->db_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->tbl_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::LST) {
    + $this->part_vals = array();
    + $_size320 = 0;
    + $_etype323 = 0;
    + $xfer += $input->readListBegin($_etype323, $_size320);
    + for ($_i324 = 0; $_i324 < $_size320; ++$_i324)
    + {
    + $elem325 = null;
    + $xfer += $input->readString($elem325);
    + $this->part_vals []= $elem325;
    + }
    + $xfer += $input->readListEnd();
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::BOOL) {
    + $xfer += $input->readBool($this->deleteData);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_args');
    + if ($this->db_name !== null) {
    + $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
    + $xfer += $output->writeString($this->db_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->tbl_name !== null) {
    + $xfer += $output->writeFieldBegin('tbl_name', TType::STRING, 2);
    + $xfer += $output->writeString($this->tbl_name);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->part_vals !== null) {
    + if (!is_array($this->part_vals)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('part_vals', TType::LST, 3);
    + {
    + $output->writeListBegin(TType::STRING, count($this->part_vals));
    + {
    + foreach ($this->part_vals as $iter326)
    + {
    + $xfer += $output->writeString($iter326);
    + }
    + }
    + $output->writeListEnd();
    + }
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->deleteData !== null) {
    + $xfer += $output->writeFieldBegin('deleteData', TType::BOOL, 4);
    + $xfer += $output->writeBool($this->deleteData);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_drop_partition_result {
    + static $_TSPEC;
    +
    + public $success = null;
    + public $o1 = null;
    + public $o2 = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 0 => array(
    + 'var' => 'success',
    + 'type' => TType::BOOL,
    + ),
    + 1 => array(
    + 'var' => 'o1',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\NoSuchObjectException',
    + ),
    + 2 => array(
    + 'var' => 'o2',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\MetaException',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['success'])) {
    + $this->success = $vals['success'];
    + }
    + if (isset($vals['o1'])) {
    + $this->o1 = $vals['o1'];
    + }
    + if (isset($vals['o2'])) {
    + $this->o2 = $vals['o2'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_drop_partition_result';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 0:
    + if ($ftype == TType::BOOL) {
    + $xfer += $input->readBool($this->success);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 1:
    + if ($ftype == TType::STRUCT) {
    + $this->o1 = new \metastore\NoSuchObjectException();
    + $xfer += $this->o1->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRUCT) {
    + $this->o2 = new \metastore\MetaException();
    + $xfer += $this->o2->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + default:
    + $xfer += $input->skip($ftype);
    + break;
    + }
    + $xfer += $input->readFieldEnd();
    + }
    + $xfer += $input->readStructEnd();
    + return $xfer;
    + }
    +
    + public function write($output) {
    + $xfer = 0;
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_result');
    + if ($this->success !== null) {
    + $xfer += $output->writeFieldBegin('success', TType::BOOL, 0);
    + $xfer += $output->writeBool($this->success);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o1 !== null) {
    + $xfer += $output->writeFieldBegin('o1', TType::STRUCT, 1);
    + $xfer += $this->o1->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->o2 !== null) {
    + $xfer += $output->writeFieldBegin('o2', TType::STRUCT, 2);
    + $xfer += $this->o2->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
    + $xfer += $output->writeFieldStop();
    + $xfer += $output->writeStructEnd();
    + return $xfer;
    + }
    +
    +}
    +
    +class ThriftHiveMetastore_drop_partition_with_environment_context_args {
    + static $_TSPEC;
    +
    + public $db_name = null;
    + public $tbl_name = null;
    + public $part_vals = null;
    + public $deleteData = null;
    + public $environment_context = null;
    +
    + public function __construct($vals=null) {
    + if (!isset(self::$_TSPEC)) {
    + self::$_TSPEC = array(
    + 1 => array(
    + 'var' => 'db_name',
    + 'type' => TType::STRING,
    + ),
    + 2 => array(
    + 'var' => 'tbl_name',
    + 'type' => TType::STRING,
    + ),
    + 3 => array(
    + 'var' => 'part_vals',
    + 'type' => TType::LST,
    + 'etype' => TType::STRING,
    + 'elem' => array(
    + 'type' => TType::STRING,
    + ),
    + ),
    + 4 => array(
    + 'var' => 'deleteData',
    + 'type' => TType::BOOL,
    + ),
    + 5 => array(
    + 'var' => 'environment_context',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\EnvironmentContext',
    + ),
    + );
    + }
    + if (is_array($vals)) {
    + if (isset($vals['db_name'])) {
    + $this->db_name = $vals['db_name'];
    + }
    + if (isset($vals['tbl_name'])) {
    + $this->tbl_name = $vals['tbl_name'];
    + }
    + if (isset($vals['part_vals'])) {
    + $this->part_vals = $vals['part_vals'];
    + }
    + if (isset($vals['deleteData'])) {
    + $this->deleteData = $vals['deleteData'];
    + }
    + if (isset($vals['environment_context'])) {
    + $this->environment_context = $vals['environment_context'];
    + }
    + }
    + }
    +
    + public function getName() {
    + return 'ThriftHiveMetastore_drop_partition_with_environment_context_args';
    + }
    +
    + public function read($input)
    + {
    + $xfer = 0;
    + $fname = null;
    + $ftype = 0;
    + $fid = 0;
    + $xfer += $input->readStructBegin($fname);
    + while (true)
    + {
    + $xfer += $input->readFieldBegin($fname, $ftype, $fid);
    + if ($ftype == TType::STOP) {
    + break;
    + }
    + switch ($fid)
    + {
    + case 1:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->db_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 2:
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->tbl_name);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 3:
    + if ($ftype == TType::LST) {
    + $this->part_vals = array();
    + $_size327 = 0;
    + $_etype330 = 0;
    + $xfer += $input->readListBegin($_etype330, $_size327);
    + for ($_i331 = 0; $_i331 < $_size327; ++$_i331)
    + {
    + $elem332 = null;
    + $xfer += $input->readString($elem332);
    + $this->part_vals []= $elem332;
    + }
    + $xfer += $input->readListEnd();
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 4:
    + if ($ftype == TType::BOOL) {
    + $xfer += $input->readBool($this->deleteData);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
    + case 5:
    + if ($ftype == TType::STRUCT) {
    + $this->environment_context = new \metastore\EnvironmentContext();
    + $xfer += $this->environment_context->read($input);
                } else {
                  $xfer += $input->skip($ftype);
                }
    @@ -10339,7 +12043,7 @@ class ThriftHiveMetastore_append_partiti

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_args');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_with_environment_context_args');
          if ($this->db_name !== null) {
            $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
            $xfer += $output->writeString($this->db_name);
    @@ -10350,9 +12054,34 @@ class ThriftHiveMetastore_append_partiti
            $xfer += $output->writeString($this->tbl_name);
            $xfer += $output->writeFieldEnd();
          }
    - if ($this->part_name !== null) {
    - $xfer += $output->writeFieldBegin('part_name', TType::STRING, 3);
    - $xfer += $output->writeString($this->part_name);
    + if ($this->part_vals !== null) {
    + if (!is_array($this->part_vals)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('part_vals', TType::LST, 3);
    + {
    + $output->writeListBegin(TType::STRING, count($this->part_vals));
    + {
    + foreach ($this->part_vals as $iter333)
    + {
    + $xfer += $output->writeString($iter333);
    + }
    + }
    + $output->writeListEnd();
    + }
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->deleteData !== null) {
    + $xfer += $output->writeFieldBegin('deleteData', TType::BOOL, 4);
    + $xfer += $output->writeBool($this->deleteData);
    + $xfer += $output->writeFieldEnd();
    + }
    + if ($this->environment_context !== null) {
    + if (!is_object($this->environment_context)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('environment_context', TType::STRUCT, 5);
    + $xfer += $this->environment_context->write($output);
            $xfer += $output->writeFieldEnd();
          }
          $xfer += $output->writeFieldStop();
    @@ -10362,35 +12091,28 @@ class ThriftHiveMetastore_append_partiti

      }

    -class ThriftHiveMetastore_append_partition_by_name_result {
    +class ThriftHiveMetastore_drop_partition_with_environment_context_result {
        static $_TSPEC;

        public $success = null;
        public $o1 = null;
        public $o2 = null;
    - public $o3 = null;

        public function __construct($vals=null) {
          if (!isset(self::$_TSPEC)) {
            self::$_TSPEC = array(
              0 => array(
                'var' => 'success',
    - 'type' => TType::STRUCT,
    - 'class' => '\metastore\Partition',
    + 'type' => TType::BOOL,
                ),
              1 => array(
                'var' => 'o1',
                'type' => TType::STRUCT,
    - 'class' => '\metastore\InvalidObjectException',
    + 'class' => '\metastore\NoSuchObjectException',
                ),
              2 => array(
                'var' => 'o2',
                'type' => TType::STRUCT,
    - 'class' => '\metastore\AlreadyExistsException',
    - ),
    - 3 => array(
    - 'var' => 'o3',
    - 'type' => TType::STRUCT,
                'class' => '\metastore\MetaException',
                ),
              );
    @@ -10405,14 +12127,11 @@ class ThriftHiveMetastore_append_partiti
            if (isset($vals['o2'])) {
              $this->o2 = $vals['o2'];
            }
    - if (isset($vals['o3'])) {
    - $this->o3 = $vals['o3'];
    - }
          }
        }

        public function getName() {
    - return 'ThriftHiveMetastore_append_partition_by_name_result';
    + return 'ThriftHiveMetastore_drop_partition_with_environment_context_result';
        }

        public function read($input)
    @@ -10431,16 +12150,15 @@ class ThriftHiveMetastore_append_partiti
            switch ($fid)
            {
              case 0:
    - if ($ftype == TType::STRUCT) {
    - $this->success = new \metastore\Partition();
    - $xfer += $this->success->read($input);
    + if ($ftype == TType::BOOL) {
    + $xfer += $input->readBool($this->success);
                } else {
                  $xfer += $input->skip($ftype);
                }
                break;
              case 1:
                if ($ftype == TType::STRUCT) {
    - $this->o1 = new \metastore\InvalidObjectException();
    + $this->o1 = new \metastore\NoSuchObjectException();
                  $xfer += $this->o1->read($input);
                } else {
                  $xfer += $input->skip($ftype);
    @@ -10448,20 +12166,12 @@ class ThriftHiveMetastore_append_partiti
                break;
              case 2:
                if ($ftype == TType::STRUCT) {
    - $this->o2 = new \metastore\AlreadyExistsException();
    + $this->o2 = new \metastore\MetaException();
                  $xfer += $this->o2->read($input);
                } else {
                  $xfer += $input->skip($ftype);
                }
                break;
    - case 3:
    - if ($ftype == TType::STRUCT) {
    - $this->o3 = new \metastore\MetaException();
    - $xfer += $this->o3->read($input);
    - } else {
    - $xfer += $input->skip($ftype);
    - }
    - break;
              default:
                $xfer += $input->skip($ftype);
                break;
    @@ -10474,13 +12184,10 @@ class ThriftHiveMetastore_append_partiti

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_append_partition_by_name_result');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_with_environment_context_result');
          if ($this->success !== null) {
    - if (!is_object($this->success)) {
    - throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    - }
    - $xfer += $output->writeFieldBegin('success', TType::STRUCT, 0);
    - $xfer += $this->success->write($output);
    + $xfer += $output->writeFieldBegin('success', TType::BOOL, 0);
    + $xfer += $output->writeBool($this->success);
            $xfer += $output->writeFieldEnd();
          }
          if ($this->o1 !== null) {
    @@ -10493,11 +12200,6 @@ class ThriftHiveMetastore_append_partiti
            $xfer += $this->o2->write($output);
            $xfer += $output->writeFieldEnd();
          }
    - if ($this->o3 !== null) {
    - $xfer += $output->writeFieldBegin('o3', TType::STRUCT, 3);
    - $xfer += $this->o3->write($output);
    - $xfer += $output->writeFieldEnd();
    - }
          $xfer += $output->writeFieldStop();
          $xfer += $output->writeStructEnd();
          return $xfer;
    @@ -10505,12 +12207,12 @@ class ThriftHiveMetastore_append_partiti

      }

    -class ThriftHiveMetastore_drop_partition_args {
    +class ThriftHiveMetastore_drop_partition_by_name_args {
        static $_TSPEC;

        public $db_name = null;
        public $tbl_name = null;
    - public $part_vals = null;
    + public $part_name = null;
        public $deleteData = null;

        public function __construct($vals=null) {
    @@ -10525,12 +12227,8 @@ class ThriftHiveMetastore_drop_partition
                'type' => TType::STRING,
                ),
              3 => array(
    - 'var' => 'part_vals',
    - 'type' => TType::LST,
    - 'etype' => TType::STRING,
    - 'elem' => array(
    - 'type' => TType::STRING,
    - ),
    + 'var' => 'part_name',
    + 'type' => TType::STRING,
                ),
              4 => array(
                'var' => 'deleteData',
    @@ -10545,8 +12243,8 @@ class ThriftHiveMetastore_drop_partition
            if (isset($vals['tbl_name'])) {
              $this->tbl_name = $vals['tbl_name'];
            }
    - if (isset($vals['part_vals'])) {
    - $this->part_vals = $vals['part_vals'];
    + if (isset($vals['part_name'])) {
    + $this->part_name = $vals['part_name'];
            }
            if (isset($vals['deleteData'])) {
              $this->deleteData = $vals['deleteData'];
    @@ -10555,7 +12253,7 @@ class ThriftHiveMetastore_drop_partition
        }

        public function getName() {
    - return 'ThriftHiveMetastore_drop_partition_args';
    + return 'ThriftHiveMetastore_drop_partition_by_name_args';
        }

        public function read($input)
    @@ -10588,18 +12286,8 @@ class ThriftHiveMetastore_drop_partition
                }
                break;
              case 3:
    - if ($ftype == TType::LST) {
    - $this->part_vals = array();
    - $_size313 = 0;
    - $_etype316 = 0;
    - $xfer += $input->readListBegin($_etype316, $_size313);
    - for ($_i317 = 0; $_i317 < $_size313; ++$_i317)
    - {
    - $elem318 = null;
    - $xfer += $input->readString($elem318);
    - $this->part_vals []= $elem318;
    - }
    - $xfer += $input->readListEnd();
    + if ($ftype == TType::STRING) {
    + $xfer += $input->readString($this->part_name);
                } else {
                  $xfer += $input->skip($ftype);
                }
    @@ -10623,7 +12311,7 @@ class ThriftHiveMetastore_drop_partition

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_args');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_args');
          if ($this->db_name !== null) {
            $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
            $xfer += $output->writeString($this->db_name);
    @@ -10634,21 +12322,9 @@ class ThriftHiveMetastore_drop_partition
            $xfer += $output->writeString($this->tbl_name);
            $xfer += $output->writeFieldEnd();
          }
    - if ($this->part_vals !== null) {
    - if (!is_array($this->part_vals)) {
    - throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    - }
    - $xfer += $output->writeFieldBegin('part_vals', TType::LST, 3);
    - {
    - $output->writeListBegin(TType::STRING, count($this->part_vals));
    - {
    - foreach ($this->part_vals as $iter319)
    - {
    - $xfer += $output->writeString($iter319);
    - }
    - }
    - $output->writeListEnd();
    - }
    + if ($this->part_name !== null) {
    + $xfer += $output->writeFieldBegin('part_name', TType::STRING, 3);
    + $xfer += $output->writeString($this->part_name);
            $xfer += $output->writeFieldEnd();
          }
          if ($this->deleteData !== null) {
    @@ -10663,7 +12339,7 @@ class ThriftHiveMetastore_drop_partition

      }

    -class ThriftHiveMetastore_drop_partition_result {
    +class ThriftHiveMetastore_drop_partition_by_name_result {
        static $_TSPEC;

        public $success = null;
    @@ -10703,7 +12379,7 @@ class ThriftHiveMetastore_drop_partition
        }

        public function getName() {
    - return 'ThriftHiveMetastore_drop_partition_result';
    + return 'ThriftHiveMetastore_drop_partition_by_name_result';
        }

        public function read($input)
    @@ -10756,7 +12432,7 @@ class ThriftHiveMetastore_drop_partition

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_result');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_result');
          if ($this->success !== null) {
            $xfer += $output->writeFieldBegin('success', TType::BOOL, 0);
            $xfer += $output->writeBool($this->success);
    @@ -10779,13 +12455,14 @@ class ThriftHiveMetastore_drop_partition

      }

    -class ThriftHiveMetastore_drop_partition_by_name_args {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args {
        static $_TSPEC;

        public $db_name = null;
        public $tbl_name = null;
        public $part_name = null;
        public $deleteData = null;
    + public $environment_context = null;

        public function __construct($vals=null) {
          if (!isset(self::$_TSPEC)) {
    @@ -10806,6 +12483,11 @@ class ThriftHiveMetastore_drop_partition
                'var' => 'deleteData',
                'type' => TType::BOOL,
                ),
    + 5 => array(
    + 'var' => 'environment_context',
    + 'type' => TType::STRUCT,
    + 'class' => '\metastore\EnvironmentContext',
    + ),
              );
          }
          if (is_array($vals)) {
    @@ -10821,11 +12503,14 @@ class ThriftHiveMetastore_drop_partition
            if (isset($vals['deleteData'])) {
              $this->deleteData = $vals['deleteData'];
            }
    + if (isset($vals['environment_context'])) {
    + $this->environment_context = $vals['environment_context'];
    + }
          }
        }

        public function getName() {
    - return 'ThriftHiveMetastore_drop_partition_by_name_args';
    + return 'ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args';
        }

        public function read($input)
    @@ -10871,6 +12556,14 @@ class ThriftHiveMetastore_drop_partition
                  $xfer += $input->skip($ftype);
                }
                break;
    + case 5:
    + if ($ftype == TType::STRUCT) {
    + $this->environment_context = new \metastore\EnvironmentContext();
    + $xfer += $this->environment_context->read($input);
    + } else {
    + $xfer += $input->skip($ftype);
    + }
    + break;
              default:
                $xfer += $input->skip($ftype);
                break;
    @@ -10883,7 +12576,7 @@ class ThriftHiveMetastore_drop_partition

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_args');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args');
          if ($this->db_name !== null) {
            $xfer += $output->writeFieldBegin('db_name', TType::STRING, 1);
            $xfer += $output->writeString($this->db_name);
    @@ -10904,6 +12597,14 @@ class ThriftHiveMetastore_drop_partition
            $xfer += $output->writeBool($this->deleteData);
            $xfer += $output->writeFieldEnd();
          }
    + if ($this->environment_context !== null) {
    + if (!is_object($this->environment_context)) {
    + throw new TProtocolException('Bad type in structure.', TProtocolException::INVALID_DATA);
    + }
    + $xfer += $output->writeFieldBegin('environment_context', TType::STRUCT, 5);
    + $xfer += $this->environment_context->write($output);
    + $xfer += $output->writeFieldEnd();
    + }
          $xfer += $output->writeFieldStop();
          $xfer += $output->writeStructEnd();
          return $xfer;
    @@ -10911,7 +12612,7 @@ class ThriftHiveMetastore_drop_partition

      }

    -class ThriftHiveMetastore_drop_partition_by_name_result {
    +class ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result {
        static $_TSPEC;

        public $success = null;
    @@ -10951,7 +12652,7 @@ class ThriftHiveMetastore_drop_partition
        }

        public function getName() {
    - return 'ThriftHiveMetastore_drop_partition_by_name_result';
    + return 'ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result';
        }

        public function read($input)
    @@ -11004,7 +12705,7 @@ class ThriftHiveMetastore_drop_partition

        public function write($output) {
          $xfer = 0;
    - $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_result');
    + $xfer += $output->writeStructBegin('ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result');
          if ($this->success !== null) {
            $xfer += $output->writeFieldBegin('success', TType::BOOL, 0);
            $xfer += $output->writeBool($this->success);
    @@ -11104,14 +12805,14 @@ class ThriftHiveMetastore_get_partition_
              case 3:
                if ($ftype == TType::LST) {
                  $this->part_vals = array();
    - $_size320 = 0;
    - $_etype323 = 0;
    - $xfer += $input->readListBegin($_etype323, $_size320);
    - for ($_i324 = 0; $_i324 < $_size320; ++$_i324)
    + $_size334 = 0;
    + $_etype337 = 0;
    + $xfer += $input->readListBegin($_etype337, $_size334);
    + for ($_i338 = 0; $_i338 < $_size334; ++$_i338)
                  {
    - $elem325 = null;
    - $xfer += $input->readString($elem325);
    - $this->part_vals []= $elem325;
    + $elem339 = null;
    + $xfer += $input->readString($elem339);
    + $this->part_vals []= $elem339;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -11149,9 +12850,9 @@ class ThriftHiveMetastore_get_partition_
            {
              $output->writeListBegin(TType::STRING, count($this->part_vals));
              {
    - foreach ($this->part_vals as $iter326)
    + foreach ($this->part_vals as $iter340)
                {
    - $xfer += $output->writeString($iter326);
    + $xfer += $output->writeString($iter340);
                }
              }
              $output->writeListEnd();
    @@ -11383,14 +13084,14 @@ class ThriftHiveMetastore_get_partition_
              case 3:
                if ($ftype == TType::LST) {
                  $this->part_vals = array();
    - $_size327 = 0;
    - $_etype330 = 0;
    - $xfer += $input->readListBegin($_etype330, $_size327);
    - for ($_i331 = 0; $_i331 < $_size327; ++$_i331)
    + $_size341 = 0;
    + $_etype344 = 0;
    + $xfer += $input->readListBegin($_etype344, $_size341);
    + for ($_i345 = 0; $_i345 < $_size341; ++$_i345)
                  {
    - $elem332 = null;
    - $xfer += $input->readString($elem332);
    - $this->part_vals []= $elem332;
    + $elem346 = null;
    + $xfer += $input->readString($elem346);
    + $this->part_vals []= $elem346;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -11407,14 +13108,14 @@ class ThriftHiveMetastore_get_partition_
              case 5:
                if ($ftype == TType::LST) {
                  $this->group_names = array();
    - $_size333 = 0;
    - $_etype336 = 0;
    - $xfer += $input->readListBegin($_etype336, $_size333);
    - for ($_i337 = 0; $_i337 < $_size333; ++$_i337)
    + $_size347 = 0;
    + $_etype350 = 0;
    + $xfer += $input->readListBegin($_etype350, $_size347);
    + for ($_i351 = 0; $_i351 < $_size347; ++$_i351)
                  {
    - $elem338 = null;
    - $xfer += $input->readString($elem338);
    - $this->group_names []= $elem338;
    + $elem352 = null;
    + $xfer += $input->readString($elem352);
    + $this->group_names []= $elem352;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -11452,9 +13153,9 @@ class ThriftHiveMetastore_get_partition_
            {
              $output->writeListBegin(TType::STRING, count($this->part_vals));
              {
    - foreach ($this->part_vals as $iter339)
    + foreach ($this->part_vals as $iter353)
                {
    - $xfer += $output->writeString($iter339);
    + $xfer += $output->writeString($iter353);
                }
              }
              $output->writeListEnd();
    @@ -11474,9 +13175,9 @@ class ThriftHiveMetastore_get_partition_
            {
              $output->writeListBegin(TType::STRING, count($this->group_names));
              {
    - foreach ($this->group_names as $iter340)
    + foreach ($this->group_names as $iter354)
                {
    - $xfer += $output->writeString($iter340);
    + $xfer += $output->writeString($iter354);
                }
              }
              $output->writeListEnd();
    @@ -12022,15 +13723,15 @@ class ThriftHiveMetastore_get_partitions
              case 0:
                if ($ftype == TType::LST) {
                  $this->success = array();
    - $_size341 = 0;
    - $_etype344 = 0;
    - $xfer += $input->readListBegin($_etype344, $_size341);
    - for ($_i345 = 0; $_i345 < $_size341; ++$_i345)
    + $_size355 = 0;
    + $_etype358 = 0;
    + $xfer += $input->readListBegin($_etype358, $_size355);
    + for ($_i359 = 0; $_i359 < $_size355; ++$_i359)
                  {
    - $elem346 = null;
    - $elem346 = new \metastore\Partition();
    - $xfer += $elem346->read($input);
    - $this->success []= $elem346;
    + $elem360 = null;
    + $elem360 = new \metastore\Partition();
    + $xfer += $elem360->read($input);
    + $this->success []= $elem360;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12074,9 +13775,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRUCT, count($this->success));
              {
    - foreach ($this->success as $iter347)
    + foreach ($this->success as $iter361)
                {
    - $xfer += $iter347->write($output);
    + $xfer += $iter361->write($output);
                }
              }
              $output->writeListEnd();
    @@ -12207,14 +13908,14 @@ class ThriftHiveMetastore_get_partitions
              case 5:
                if ($ftype == TType::LST) {
                  $this->group_names = array();
    - $_size348 = 0;
    - $_etype351 = 0;
    - $xfer += $input->readListBegin($_etype351, $_size348);
    - for ($_i352 = 0; $_i352 < $_size348; ++$_i352)
    + $_size362 = 0;
    + $_etype365 = 0;
    + $xfer += $input->readListBegin($_etype365, $_size362);
    + for ($_i366 = 0; $_i366 < $_size362; ++$_i366)
                  {
    - $elem353 = null;
    - $xfer += $input->readString($elem353);
    - $this->group_names []= $elem353;
    + $elem367 = null;
    + $xfer += $input->readString($elem367);
    + $this->group_names []= $elem367;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12262,9 +13963,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRING, count($this->group_names));
              {
    - foreach ($this->group_names as $iter354)
    + foreach ($this->group_names as $iter368)
                {
    - $xfer += $output->writeString($iter354);
    + $xfer += $output->writeString($iter368);
                }
              }
              $output->writeListEnd();
    @@ -12344,15 +14045,15 @@ class ThriftHiveMetastore_get_partitions
              case 0:
                if ($ftype == TType::LST) {
                  $this->success = array();
    - $_size355 = 0;
    - $_etype358 = 0;
    - $xfer += $input->readListBegin($_etype358, $_size355);
    - for ($_i359 = 0; $_i359 < $_size355; ++$_i359)
    + $_size369 = 0;
    + $_etype372 = 0;
    + $xfer += $input->readListBegin($_etype372, $_size369);
    + for ($_i373 = 0; $_i373 < $_size369; ++$_i373)
                  {
    - $elem360 = null;
    - $elem360 = new \metastore\Partition();
    - $xfer += $elem360->read($input);
    - $this->success []= $elem360;
    + $elem374 = null;
    + $elem374 = new \metastore\Partition();
    + $xfer += $elem374->read($input);
    + $this->success []= $elem374;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12396,9 +14097,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRUCT, count($this->success));
              {
    - foreach ($this->success as $iter361)
    + foreach ($this->success as $iter375)
                {
    - $xfer += $iter361->write($output);
    + $xfer += $iter375->write($output);
                }
              }
              $output->writeListEnd();
    @@ -12590,14 +14291,14 @@ class ThriftHiveMetastore_get_partition_
              case 0:
                if ($ftype == TType::LST) {
                  $this->success = array();
    - $_size362 = 0;
    - $_etype365 = 0;
    - $xfer += $input->readListBegin($_etype365, $_size362);
    - for ($_i366 = 0; $_i366 < $_size362; ++$_i366)
    + $_size376 = 0;
    + $_etype379 = 0;
    + $xfer += $input->readListBegin($_etype379, $_size376);
    + for ($_i380 = 0; $_i380 < $_size376; ++$_i380)
                  {
    - $elem367 = null;
    - $xfer += $input->readString($elem367);
    - $this->success []= $elem367;
    + $elem381 = null;
    + $xfer += $input->readString($elem381);
    + $this->success []= $elem381;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12633,9 +14334,9 @@ class ThriftHiveMetastore_get_partition_
            {
              $output->writeListBegin(TType::STRING, count($this->success));
              {
    - foreach ($this->success as $iter368)
    + foreach ($this->success as $iter382)
                {
    - $xfer += $output->writeString($iter368);
    + $xfer += $output->writeString($iter382);
                }
              }
              $output->writeListEnd();
    @@ -12739,14 +14440,14 @@ class ThriftHiveMetastore_get_partitions
              case 3:
                if ($ftype == TType::LST) {
                  $this->part_vals = array();
    - $_size369 = 0;
    - $_etype372 = 0;
    - $xfer += $input->readListBegin($_etype372, $_size369);
    - for ($_i373 = 0; $_i373 < $_size369; ++$_i373)
    + $_size383 = 0;
    + $_etype386 = 0;
    + $xfer += $input->readListBegin($_etype386, $_size383);
    + for ($_i387 = 0; $_i387 < $_size383; ++$_i387)
                  {
    - $elem374 = null;
    - $xfer += $input->readString($elem374);
    - $this->part_vals []= $elem374;
    + $elem388 = null;
    + $xfer += $input->readString($elem388);
    + $this->part_vals []= $elem388;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12791,9 +14492,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRING, count($this->part_vals));
              {
    - foreach ($this->part_vals as $iter375)
    + foreach ($this->part_vals as $iter389)
                {
    - $xfer += $output->writeString($iter375);
    + $xfer += $output->writeString($iter389);
                }
              }
              $output->writeListEnd();
    @@ -12878,15 +14579,15 @@ class ThriftHiveMetastore_get_partitions
              case 0:
                if ($ftype == TType::LST) {
                  $this->success = array();
    - $_size376 = 0;
    - $_etype379 = 0;
    - $xfer += $input->readListBegin($_etype379, $_size376);
    - for ($_i380 = 0; $_i380 < $_size376; ++$_i380)
    + $_size390 = 0;
    + $_etype393 = 0;
    + $xfer += $input->readListBegin($_etype393, $_size390);
    + for ($_i394 = 0; $_i394 < $_size390; ++$_i394)
                  {
    - $elem381 = null;
    - $elem381 = new \metastore\Partition();
    - $xfer += $elem381->read($input);
    - $this->success []= $elem381;
    + $elem395 = null;
    + $elem395 = new \metastore\Partition();
    + $xfer += $elem395->read($input);
    + $this->success []= $elem395;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -12930,9 +14631,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRUCT, count($this->success));
              {
    - foreach ($this->success as $iter382)
    + foreach ($this->success as $iter396)
                {
    - $xfer += $iter382->write($output);
    + $xfer += $iter396->write($output);
                }
              }
              $output->writeListEnd();
    @@ -13061,14 +14762,14 @@ class ThriftHiveMetastore_get_partitions
              case 3:
                if ($ftype == TType::LST) {
                  $this->part_vals = array();
    - $_size383 = 0;
    - $_etype386 = 0;
    - $xfer += $input->readListBegin($_etype386, $_size383);
    - for ($_i387 = 0; $_i387 < $_size383; ++$_i387)
    + $_size397 = 0;
    + $_etype400 = 0;
    + $xfer += $input->readListBegin($_etype400, $_size397);
    + for ($_i401 = 0; $_i401 < $_size397; ++$_i401)
                  {
    - $elem388 = null;
    - $xfer += $input->readString($elem388);
    - $this->part_vals []= $elem388;
    + $elem402 = null;
    + $xfer += $input->readString($elem402);
    + $this->part_vals []= $elem402;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -13092,14 +14793,14 @@ class ThriftHiveMetastore_get_partitions
              case 6:
                if ($ftype == TType::LST) {
                  $this->group_names = array();
    - $_size389 = 0;
    - $_etype392 = 0;
    - $xfer += $input->readListBegin($_etype392, $_size389);
    - for ($_i393 = 0; $_i393 < $_size389; ++$_i393)
    + $_size403 = 0;
    + $_etype406 = 0;
    + $xfer += $input->readListBegin($_etype406, $_size403);
    + for ($_i407 = 0; $_i407 < $_size403; ++$_i407)
                  {
    - $elem394 = null;
    - $xfer += $input->readString($elem394);
    - $this->group_names []= $elem394;
    + $elem408 = null;
    + $xfer += $input->readString($elem408);
    + $this->group_names []= $elem408;
                  }
                  $xfer += $input->readListEnd();
                } else {
    @@ -13137,9 +14838,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRING, count($this->part_vals));
              {
    - foreach ($this->part_vals as $iter395)
    + foreach ($this->part_vals as $iter409)
                {
    - $xfer += $output->writeString($iter395);
    + $xfer += $output->writeString($iter409);
                }
              }
              $output->writeListEnd();
    @@ -13164,9 +14865,9 @@ class ThriftHiveMetastore_get_partitions
            {
              $output->writeListBegin(TType::STRING, count($this->group_names));
              {
    - foreach ($this->group_names as $iter396)
    + foreach ($this->group_names as $iter410)
                {

    [... 724 lines stripped ...]
    Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp Tue Feb 12 18:52:55 2013
    @@ -3312,6 +3312,236 @@ uint32_t ThriftHiveMetastore_drop_table_
        return xfer;
      }

    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->dbname);
    + this->__isset.dbname = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->name);
    + this->__isset.name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->deleteData);
    + this->__isset.deleteData = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->environment_context.read(iprot);
    + this->__isset.environment_context = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_with_environment_context_args");
    +
    + xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString(this->dbname);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString(this->name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 3);
    + xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += this->environment_context.write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_with_environment_context_pargs");
    +
    + xfer += oprot->writeFieldBegin("dbname", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString((*(this->dbname)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString((*(this->name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 3);
    + xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += (*(this->environment_context)).write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +
    + uint32_t xfer = 0;
    +
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_table_with_environment_context_result");
    +
    + if (this->__isset.o1) {
    + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    + xfer += this->o1.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o3) {
    + xfer += oprot->writeFieldBegin("o3", ::apache::thrift::protocol::T_STRUCT, 2);
    + xfer += this->o3.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + }
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_table_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
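
For context only, here is a minimal, hypothetical caller-side sketch (not part of this commit) of how the new drop_table_with_environment_context call whose args/result serialization is added above might be driven from C++. The 9083 port, the property key, the header layout (Thrift 0.9-era), and the exact generated client signature are assumptions based on standard Thrift C++ code generation, not code taken from this change.

    // Hedged sketch: assumes Thrift 0.9-era headers, the Apache::Hadoop::Hive namespace from
    // hive_metastore.thrift, and an EnvironmentContext struct carrying a map<string,string>
    // properties field.
    #include <cstdio>
    #include <boost/shared_ptr.hpp>
    #include <thrift/protocol/TBinaryProtocol.h>
    #include <thrift/transport/TSocket.h>
    #include <thrift/transport/TBufferTransports.h>
    #include "ThriftHiveMetastore.h"
    #include "hive_metastore_types.h"

    using namespace apache::thrift;
    using namespace apache::thrift::protocol;
    using namespace apache::thrift::transport;
    using namespace Apache::Hadoop::Hive;

    int main() {
      boost::shared_ptr<TTransport> socket(new TSocket("localhost", 9083));   // metastore endpoint (assumed)
      boost::shared_ptr<TTransport> transport(new TBufferedTransport(socket));
      boost::shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));
      ThriftHiveMetastoreClient client(protocol);

      transport->open();

      // The environment context is a free-form map of properties that the server can hand to
      // metastore event listeners; the key below is purely illustrative.
      EnvironmentContext ctx;
      ctx.properties["source"] = "example-client";
      ctx.__isset.properties = true;

      try {
        // Field order mirrors the args struct above: dbname (1), name (2), deleteData (3), context (4).
        client.drop_table_with_environment_context("default", "tmp_table", true, ctx);
      } catch (const NoSuchObjectException& e) {   // surfaced from result field o1
        fprintf(stderr, "no such table: %s\n", e.what());
      } catch (const MetaException& e) {           // surfaced from result field o3
        fprintf(stderr, "metastore error: %s\n", e.what());
      }

      transport->close();
      return 0;
    }
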
      uint32_t ThriftHiveMetastore_get_tables_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
    @@ -5994,7 +6224,7 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_append_partition_by_name_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6031,9 +6261,29 @@ uint32_t ThriftHiveMetastore_append_part
              }
              break;
            case 3:
    - if (ftype == ::apache::thrift::protocol::T_STRING) {
    - xfer += iprot->readString(this->part_name);
    - this->__isset.part_name = true;
    + if (ftype == ::apache::thrift::protocol::T_LIST) {
    + {
    + this->part_vals.clear();
    + uint32_t _size325;
    + ::apache::thrift::protocol::TType _etype328;
    + xfer += iprot->readListBegin(_etype328, _size325);
    + this->part_vals.resize(_size325);
    + uint32_t _i329;
    + for (_i329 = 0; _i329 < _size325; ++_i329)
    + {
    + xfer += iprot->readString(this->part_vals[_i329]);
    + }
    + xfer += iprot->readListEnd();
    + }
    + this->__isset.part_vals = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->environment_context.read(iprot);
    + this->__isset.environment_context = true;
              } else {
                xfer += iprot->skip(ftype);
              }
    @@ -6050,9 +6300,9 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_append_partition_by_name_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_args");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_with_environment_context_args");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString(this->db_name);
    @@ -6062,18 +6312,30 @@ uint32_t ThriftHiveMetastore_append_part
        xfer += oprot->writeString(this->tbl_name);
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    - xfer += oprot->writeString(this->part_name);
    - xfer += oprot->writeFieldEnd();
    -
    - xfer += oprot->writeFieldStop();
    - xfer += oprot->writeStructEnd();
    - return xfer;
    -}
    -
    -uint32_t ThriftHiveMetastore_append_partition_by_name_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    - uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_pargs");
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    + std::vector<std::string> ::const_iterator _iter330;
    + for (_iter330 = this->part_vals.begin(); _iter330 != this->part_vals.end(); ++_iter330)
    + {
    + xfer += oprot->writeString((*_iter330));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += this->environment_context.write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
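
A similar hedged sketch for the append_partition_with_environment_context variant being serialized above: field 3 is now the list of partition values and field 4 the EnvironmentContext. The generated C++ client is assumed to follow the usual Thrift convention of returning the Partition through an out-parameter; all table, partition, and property names below are illustrative.

    #include <string>
    #include <vector>
    #include "ThriftHiveMetastore.h"
    #include "hive_metastore_types.h"

    using namespace Apache::Hadoop::Hive;

    // Appends ds=2013-02-12 to default.page_views (illustrative names) and returns its location.
    std::string appendPartitionWithContext(ThriftHiveMetastoreClient& client) {
      std::vector<std::string> part_vals;
      part_vals.push_back("2013-02-12");           // one value per partition column

      EnvironmentContext ctx;
      ctx.properties["created_by"] = "example-client";  // illustrative property
      ctx.__isset.properties = true;

      Partition created;
      // Args mirror the struct above: db_name (1), tbl_name (2), part_vals (3), context (4).
      client.append_partition_with_environment_context(created, "default", "page_views",
                                                        part_vals, ctx);
      return created.sd.location;   // the StorageDescriptor carries the partition location
    }
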
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_with_environment_context_pargs");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString((*(this->db_name)));
    @@ -6083,8 +6345,20 @@ uint32_t ThriftHiveMetastore_append_part
        xfer += oprot->writeString((*(this->tbl_name)));
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    - xfer += oprot->writeString((*(this->part_name)));
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    + std::vector<std::string> ::const_iterator _iter331;
    + for (_iter331 = (*(this->part_vals)).begin(); _iter331 != (*(this->part_vals)).end(); ++_iter331)
    + {
    + xfer += oprot->writeString((*_iter331));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += (*(this->environment_context)).write(oprot);
        xfer += oprot->writeFieldEnd();

        xfer += oprot->writeFieldStop();
    @@ -6092,7 +6366,7 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_append_partition_by_name_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6156,11 +6430,11 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_append_partition_by_name_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_result::write(::apache::thrift::protocol::TProtocol* oprot) const {

        uint32_t xfer = 0;

    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_result");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_with_environment_context_result");

        if (this->__isset.success) {
          xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0);
    @@ -6184,7 +6458,7 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_append_partition_by_name_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6248,7 +6522,7 @@ uint32_t ThriftHiveMetastore_append_part
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6285,29 +6559,9 @@ uint32_t ThriftHiveMetastore_drop_partit
              }
              break;
            case 3:
    - if (ftype == ::apache::thrift::protocol::T_LIST) {
    - {
    - this->part_vals.clear();
    - uint32_t _size325;
    - ::apache::thrift::protocol::TType _etype328;
    - xfer += iprot->readListBegin(_etype328, _size325);
    - this->part_vals.resize(_size325);
    - uint32_t _i329;
    - for (_i329 = 0; _i329 < _size325; ++_i329)
    - {
    - xfer += iprot->readString(this->part_vals[_i329]);
    - }
    - xfer += iprot->readListEnd();
    - }
    - this->__isset.part_vals = true;
    - } else {
    - xfer += iprot->skip(ftype);
    - }
    - break;
    - case 4:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool(this->deleteData);
    - this->__isset.deleteData = true;
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->part_name);
    + this->__isset.part_name = true;
              } else {
                xfer += iprot->skip(ftype);
              }
    @@ -6324,9 +6578,9 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_args");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_args");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString(this->db_name);
    @@ -6336,20 +6590,8 @@ uint32_t ThriftHiveMetastore_drop_partit
        xfer += oprot->writeString(this->tbl_name);
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    - {
    - xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    - std::vector<std::string> ::const_iterator _iter330;
    - for (_iter330 = this->part_vals.begin(); _iter330 != this->part_vals.end(); ++_iter330)
    - {
    - xfer += oprot->writeString((*_iter330));
    - }
    - xfer += oprot->writeListEnd();
    - }
    - xfer += oprot->writeFieldEnd();
    -
    - xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    - xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString(this->part_name);
        xfer += oprot->writeFieldEnd();

        xfer += oprot->writeFieldStop();
    @@ -6357,9 +6599,9 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_pargs");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_pargs");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString((*(this->db_name)));
    @@ -6369,20 +6611,8 @@ uint32_t ThriftHiveMetastore_drop_partit
        xfer += oprot->writeString((*(this->tbl_name)));
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    - {
    - xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    - std::vector<std::string> ::const_iterator _iter331;
    - for (_iter331 = (*(this->part_vals)).begin(); _iter331 != (*(this->part_vals)).end(); ++_iter331)
    - {
    - xfer += oprot->writeString((*_iter331));
    - }
    - xfer += oprot->writeListEnd();
    - }
    - xfer += oprot->writeFieldEnd();
    -
    - xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    - xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString((*(this->part_name)));
        xfer += oprot->writeFieldEnd();

        xfer += oprot->writeFieldStop();
    @@ -6390,7 +6620,7 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_result::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6411,8 +6641,8 @@ uint32_t ThriftHiveMetastore_drop_partit
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool(this->success);
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->success.read(iprot);
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -6434,6 +6664,14 @@ uint32_t ThriftHiveMetastore_drop_partit
                xfer += iprot->skip(ftype);
              }
              break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
            default:
              xfer += iprot->skip(ftype);
              break;
    @@ -6446,15 +6684,15 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_result::write(::apache::thrift::protocol::TProtocol* oprot) const {

        uint32_t xfer = 0;

    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_result");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_result");

        if (this->__isset.success) {
    - xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    - xfer += oprot->writeBool(this->success);
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0);
    + xfer += this->success.write(oprot);
          xfer += oprot->writeFieldEnd();
        } else if (this->__isset.o1) {
          xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    @@ -6464,13 +6702,17 @@ uint32_t ThriftHiveMetastore_drop_partit
          xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
          xfer += this->o2.write(oprot);
          xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o3) {
    + xfer += oprot->writeFieldBegin("o3", ::apache::thrift::protocol::T_STRUCT, 3);
    + xfer += this->o3.write(oprot);
    + xfer += oprot->writeFieldEnd();
        }
        xfer += oprot->writeFieldStop();
        xfer += oprot->writeStructEnd();
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_presult::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6491,8 +6733,8 @@ uint32_t ThriftHiveMetastore_drop_partit
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool((*(this->success)));
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += (*(this->success)).read(iprot);
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -6514,6 +6756,14 @@ uint32_t ThriftHiveMetastore_drop_partit
                xfer += iprot->skip(ftype);
              }
              break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
            default:
              xfer += iprot->skip(ftype);
              break;
    @@ -6526,7 +6776,7 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6571,9 +6821,9 @@ uint32_t ThriftHiveMetastore_drop_partit
              }
              break;
            case 4:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool(this->deleteData);
    - this->__isset.deleteData = true;
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->environment_context.read(iprot);
    + this->__isset.environment_context = true;
              } else {
                xfer += iprot->skip(ftype);
              }
    @@ -6590,9 +6840,9 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_args");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_with_environment_context_args");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString(this->db_name);
    @@ -6606,8 +6856,8 @@ uint32_t ThriftHiveMetastore_drop_partit
        xfer += oprot->writeString(this->part_name);
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    - xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += this->environment_context.write(oprot);
        xfer += oprot->writeFieldEnd();

        xfer += oprot->writeFieldStop();
    @@ -6615,9 +6865,9 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_pargs");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_with_environment_context_pargs");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString((*(this->db_name)));
    @@ -6631,8 +6881,8 @@ uint32_t ThriftHiveMetastore_drop_partit
        xfer += oprot->writeString((*(this->part_name)));
        xfer += oprot->writeFieldEnd();

    - xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    - xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 4);
    + xfer += (*(this->environment_context)).write(oprot);
        xfer += oprot->writeFieldEnd();

        xfer += oprot->writeFieldStop();
    @@ -6640,7 +6890,7 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6661,8 +6911,8 @@ uint32_t ThriftHiveMetastore_drop_partit
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool(this->success);
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->success.read(iprot);
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -6684,6 +6934,14 @@ uint32_t ThriftHiveMetastore_drop_partit
                xfer += iprot->skip(ftype);
              }
              break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
            default:
              xfer += iprot->skip(ftype);
              break;
    @@ -6696,15 +6954,15 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_result::write(::apache::thrift::protocol::TProtocol* oprot) const {

        uint32_t xfer = 0;

    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_result");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_append_partition_by_name_with_environment_context_result");

        if (this->__isset.success) {
    - xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    - xfer += oprot->writeBool(this->success);
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0);
    + xfer += this->success.write(oprot);
          xfer += oprot->writeFieldEnd();
        } else if (this->__isset.o1) {
          xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    @@ -6714,13 +6972,17 @@ uint32_t ThriftHiveMetastore_drop_partit
          xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
          xfer += this->o2.write(oprot);
          xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o3) {
    + xfer += oprot->writeFieldBegin("o3", ::apache::thrift::protocol::T_STRUCT, 3);
    + xfer += this->o3.write(oprot);
    + xfer += oprot->writeFieldEnd();
        }
        xfer += oprot->writeFieldStop();
        xfer += oprot->writeStructEnd();
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_drop_partition_by_name_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_append_partition_by_name_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6741,8 +7003,8 @@ uint32_t ThriftHiveMetastore_drop_partit
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_BOOL) {
    - xfer += iprot->readBool((*(this->success)));
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += (*(this->success)).read(iprot);
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -6764,6 +7026,14 @@ uint32_t ThriftHiveMetastore_drop_partit
                xfer += iprot->skip(ftype);
              }
              break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o3.read(iprot);
    + this->__isset.o3 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
            default:
              xfer += iprot->skip(ftype);
              break;
    @@ -6776,7 +7046,7 @@ uint32_t ThriftHiveMetastore_drop_partit
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_drop_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6832,6 +7102,14 @@ uint32_t ThriftHiveMetastore_get_partiti
                xfer += iprot->skip(ftype);
              }
              break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->deleteData);
    + this->__isset.deleteData = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
            default:
              xfer += iprot->skip(ftype);
              break;
    @@ -6844,9 +7122,9 @@ uint32_t ThriftHiveMetastore_get_partiti
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_drop_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_args");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_args");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString(this->db_name);
    @@ -6868,14 +7146,18 @@ uint32_t ThriftHiveMetastore_get_partiti
        }
        xfer += oprot->writeFieldEnd();

    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldEnd();
    +
        xfer += oprot->writeFieldStop();
        xfer += oprot->writeStructEnd();
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_drop_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
        uint32_t xfer = 0;
    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_pargs");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_pargs");

        xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
        xfer += oprot->writeString((*(this->db_name)));
    @@ -6897,12 +7179,16 @@ uint32_t ThriftHiveMetastore_get_partiti
        }
        xfer += oprot->writeFieldEnd();

    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldEnd();
    +
        xfer += oprot->writeFieldStop();
        xfer += oprot->writeStructEnd();
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_drop_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -6923,8 +7209,8 @@ uint32_t ThriftHiveMetastore_get_partiti
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    - xfer += this->success.read(iprot);
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->success);
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -6958,15 +7244,15 @@ uint32_t ThriftHiveMetastore_get_partiti
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +uint32_t ThriftHiveMetastore_drop_partition_result::write(::apache::thrift::protocol::TProtocol* oprot) const {

        uint32_t xfer = 0;

    - xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_result");
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_result");

        if (this->__isset.success) {
    - xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0);
    - xfer += this->success.write(oprot);
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    + xfer += oprot->writeBool(this->success);
          xfer += oprot->writeFieldEnd();
        } else if (this->__isset.o1) {
          xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    @@ -6982,7 +7268,7 @@ uint32_t ThriftHiveMetastore_get_partiti
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_drop_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -7003,8 +7289,8 @@ uint32_t ThriftHiveMetastore_get_partiti
          switch (fid)
          {
            case 0:
    - if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    - xfer += (*(this->success)).read(iprot);
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool((*(this->success)));
                this->__isset.success = true;
              } else {
                xfer += iprot->skip(ftype);
    @@ -7038,7 +7324,7 @@ uint32_t ThriftHiveMetastore_get_partiti
        return xfer;
      }

    -uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {

        uint32_t xfer = 0;
        std::string fname;
    @@ -7095,9 +7381,1081 @@ uint32_t ThriftHiveMetastore_get_partiti
              }
              break;
            case 4:
    - if (ftype == ::apache::thrift::protocol::T_STRING) {
    - xfer += iprot->readString(this->user_name);
    - this->__isset.user_name = true;
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->deleteData);
    + this->__isset.deleteData = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 5:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->environment_context.read(iprot);
    + this->__isset.environment_context = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_with_environment_context_args");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString(this->db_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString(this->tbl_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    + std::vector<std::string> ::const_iterator _iter344;
    + for (_iter344 = this->part_vals.begin(); _iter344 != this->part_vals.end(); ++_iter344)
    + {
    + xfer += oprot->writeString((*_iter344));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 5);
    + xfer += this->environment_context.write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_with_environment_context_pargs");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString((*(this->db_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString((*(this->tbl_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    + std::vector<std::string> ::const_iterator _iter345;
    + for (_iter345 = (*(this->part_vals)).begin(); _iter345 != (*(this->part_vals)).end(); ++_iter345)
    + {
    + xfer += oprot->writeString((*_iter345));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 5);
    + xfer += (*(this->environment_context)).write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->success);
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +
    + uint32_t xfer = 0;
    +
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_with_environment_context_result");
    +
    + if (this->__isset.success) {
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    + xfer += oprot->writeBool(this->success);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o1) {
    + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    + xfer += this->o1.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o2) {
    + xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
    + xfer += this->o2.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + }
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool((*(this->success)));
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->db_name);
    + this->__isset.db_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->tbl_name);
    + this->__isset.tbl_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->part_name);
    + this->__isset.part_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->deleteData);
    + this->__isset.deleteData = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_args");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString(this->db_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString(this->tbl_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString(this->part_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_pargs");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString((*(this->db_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString((*(this->tbl_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString((*(this->part_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->success);
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +
    + uint32_t xfer = 0;
    +
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_result");
    +
    + if (this->__isset.success) {
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    + xfer += oprot->writeBool(this->success);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o1) {
    + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    + xfer += this->o1.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o2) {
    + xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
    + xfer += this->o2.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + }
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool((*(this->success)));
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->db_name);
    + this->__isset.db_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->tbl_name);
    + this->__isset.tbl_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->part_name);
    + this->__isset.part_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->deleteData);
    + this->__isset.deleteData = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 5:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->environment_context.read(iprot);
    + this->__isset.environment_context = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_with_environment_context_args");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString(this->db_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString(this->tbl_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString(this->part_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool(this->deleteData);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 5);
    + xfer += this->environment_context.write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_with_environment_context_pargs");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString((*(this->db_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString((*(this->tbl_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_name", ::apache::thrift::protocol::T_STRING, 3);
    + xfer += oprot->writeString((*(this->part_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("deleteData", ::apache::thrift::protocol::T_BOOL, 4);
    + xfer += oprot->writeBool((*(this->deleteData)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("environment_context", ::apache::thrift::protocol::T_STRUCT, 5);
    + xfer += (*(this->environment_context)).write(oprot);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool(this->success);
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +
    + uint32_t xfer = 0;
    +
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_drop_partition_by_name_with_environment_context_result");
    +
    + if (this->__isset.success) {
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_BOOL, 0);
    + xfer += oprot->writeBool(this->success);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o1) {
    + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    + xfer += this->o1.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o2) {
    + xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
    + xfer += this->o2.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + }
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_drop_partition_by_name_with_environment_context_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_BOOL) {
    + xfer += iprot->readBool((*(this->success)));
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->db_name);
    + this->__isset.db_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->tbl_name);
    + this->__isset.tbl_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_LIST) {
    + {
    + this->part_vals.clear();
    + uint32_t _size346;
    + ::apache::thrift::protocol::TType _etype349;
    + xfer += iprot->readListBegin(_etype349, _size346);
    + this->part_vals.resize(_size346);
    + uint32_t _i350;
    + for (_i350 = 0; _i350 < _size346; ++_i350)
    + {
    + xfer += iprot->readString(this->part_vals[_i350]);
    + }
    + xfer += iprot->readListEnd();
    + }
    + this->__isset.part_vals = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_args::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_args");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString(this->db_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString(this->tbl_name);
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    + std::vector<std::string> ::const_iterator _iter351;
    + for (_iter351 = this->part_vals.begin(); _iter351 != this->part_vals.end(); ++_iter351)
    + {
    + xfer += oprot->writeString((*_iter351));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_pargs::write(::apache::thrift::protocol::TProtocol* oprot) const {
    + uint32_t xfer = 0;
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_pargs");
    +
    + xfer += oprot->writeFieldBegin("db_name", ::apache::thrift::protocol::T_STRING, 1);
    + xfer += oprot->writeString((*(this->db_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("tbl_name", ::apache::thrift::protocol::T_STRING, 2);
    + xfer += oprot->writeString((*(this->tbl_name)));
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
    + {
    + xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    + std::vector<std::string> ::const_iterator _iter352;
    + for (_iter352 = (*(this->part_vals)).begin(); _iter352 != (*(this->part_vals)).end(); ++_iter352)
    + {
    + xfer += oprot->writeString((*_iter352));
    + }
    + xfer += oprot->writeListEnd();
    + }
    + xfer += oprot->writeFieldEnd();
    +
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_result::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->success.read(iprot);
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_result::write(::apache::thrift::protocol::TProtocol* oprot) const {
    +
    + uint32_t xfer = 0;
    +
    + xfer += oprot->writeStructBegin("ThriftHiveMetastore_get_partition_result");
    +
    + if (this->__isset.success) {
    + xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_STRUCT, 0);
    + xfer += this->success.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o1) {
    + xfer += oprot->writeFieldBegin("o1", ::apache::thrift::protocol::T_STRUCT, 1);
    + xfer += this->o1.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + } else if (this->__isset.o2) {
    + xfer += oprot->writeFieldBegin("o2", ::apache::thrift::protocol::T_STRUCT, 2);
    + xfer += this->o2.write(oprot);
    + xfer += oprot->writeFieldEnd();
    + }
    + xfer += oprot->writeFieldStop();
    + xfer += oprot->writeStructEnd();
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_presult::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 0:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += (*(this->success)).read(iprot);
    + this->__isset.success = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o1.read(iprot);
    + this->__isset.o1 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRUCT) {
    + xfer += this->o2.read(iprot);
    + this->__isset.o2 = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + default:
    + xfer += iprot->skip(ftype);
    + break;
    + }
    + xfer += iprot->readFieldEnd();
    + }
    +
    + xfer += iprot->readStructEnd();
    +
    + return xfer;
    +}
    +
    +uint32_t ThriftHiveMetastore_get_partition_with_auth_args::read(::apache::thrift::protocol::TProtocol* iprot) {
    +
    + uint32_t xfer = 0;
    + std::string fname;
    + ::apache::thrift::protocol::TType ftype;
    + int16_t fid;
    +
    + xfer += iprot->readStructBegin(fname);
    +
    + using ::apache::thrift::protocol::TProtocolException;
    +
    +
    + while (true)
    + {
    + xfer += iprot->readFieldBegin(fname, ftype, fid);
    + if (ftype == ::apache::thrift::protocol::T_STOP) {
    + break;
    + }
    + switch (fid)
    + {
    + case 1:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->db_name);
    + this->__isset.db_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 2:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->tbl_name);
    + this->__isset.tbl_name = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 3:
    + if (ftype == ::apache::thrift::protocol::T_LIST) {
    + {
    + this->part_vals.clear();
    + uint32_t _size353;
    + ::apache::thrift::protocol::TType _etype356;
    + xfer += iprot->readListBegin(_etype356, _size353);
    + this->part_vals.resize(_size353);
    + uint32_t _i357;
    + for (_i357 = 0; _i357 < _size353; ++_i357)
    + {
    + xfer += iprot->readString(this->part_vals[_i357]);
    + }
    + xfer += iprot->readListEnd();
    + }
    + this->__isset.part_vals = true;
    + } else {
    + xfer += iprot->skip(ftype);
    + }
    + break;
    + case 4:
    + if (ftype == ::apache::thrift::protocol::T_STRING) {
    + xfer += iprot->readString(this->user_name);
    + this->__isset.user_name = true;
              } else {
                xfer += iprot->skip(ftype);
              }
    @@ -7106,14 +8464,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->group_names.clear();
    - uint32_t _size344;
    - ::apache::thrift::protocol::TType _etype347;
    - xfer += iprot->readListBegin(_etype347, _size344);
    - this->group_names.resize(_size344);
    - uint32_t _i348;
    - for (_i348 = 0; _i348 < _size344; ++_i348)
    + uint32_t _size358;
    + ::apache::thrift::protocol::TType _etype361;
    + xfer += iprot->readListBegin(_etype361, _size358);
    + this->group_names.resize(_size358);
    + uint32_t _i362;
    + for (_i362 = 0; _i362 < _size358; ++_i362)
                  {
    - xfer += iprot->readString(this->group_names[_i348]);
    + xfer += iprot->readString(this->group_names[_i362]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -7149,10 +8507,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    - std::vector<std::string> ::const_iterator _iter349;
    - for (_iter349 = this->part_vals.begin(); _iter349 != this->part_vals.end(); ++_iter349)
    + std::vector<std::string> ::const_iterator _iter363;
    + for (_iter363 = this->part_vals.begin(); _iter363 != this->part_vals.end(); ++_iter363)
          {
    - xfer += oprot->writeString((*_iter349));
    + xfer += oprot->writeString((*_iter363));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -7165,10 +8523,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
    - std::vector<std::string> ::const_iterator _iter350;
    - for (_iter350 = this->group_names.begin(); _iter350 != this->group_names.end(); ++_iter350)
    + std::vector<std::string> ::const_iterator _iter364;
    + for (_iter364 = this->group_names.begin(); _iter364 != this->group_names.end(); ++_iter364)
          {
    - xfer += oprot->writeString((*_iter350));
    + xfer += oprot->writeString((*_iter364));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -7194,10 +8552,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    - std::vector<std::string> ::const_iterator _iter351;
    - for (_iter351 = (*(this->part_vals)).begin(); _iter351 != (*(this->part_vals)).end(); ++_iter351)
    + std::vector<std::string> ::const_iterator _iter365;
    + for (_iter365 = (*(this->part_vals)).begin(); _iter365 != (*(this->part_vals)).end(); ++_iter365)
          {
    - xfer += oprot->writeString((*_iter351));
    + xfer += oprot->writeString((*_iter365));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -7210,10 +8568,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
    - std::vector<std::string> ::const_iterator _iter352;
    - for (_iter352 = (*(this->group_names)).begin(); _iter352 != (*(this->group_names)).end(); ++_iter352)
    + std::vector<std::string> ::const_iterator _iter366;
    + for (_iter366 = (*(this->group_names)).begin(); _iter366 != (*(this->group_names)).end(); ++_iter366)
          {
    - xfer += oprot->writeString((*_iter352));
    + xfer += oprot->writeString((*_iter366));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -7716,14 +9074,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->success.clear();
    - uint32_t _size353;
    - ::apache::thrift::protocol::TType _etype356;
    - xfer += iprot->readListBegin(_etype356, _size353);
    - this->success.resize(_size353);
    - uint32_t _i357;
    - for (_i357 = 0; _i357 < _size353; ++_i357)
    + uint32_t _size367;
    + ::apache::thrift::protocol::TType _etype370;
    + xfer += iprot->readListBegin(_etype370, _size367);
    + this->success.resize(_size367);
    + uint32_t _i371;
    + for (_i371 = 0; _i371 < _size367; ++_i371)
                  {
    - xfer += this->success[_i357].read(iprot);
    + xfer += this->success[_i371].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -7770,10 +9128,10 @@ uint32_t ThriftHiveMetastore_get_partiti
          xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
          {
            xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
    - std::vector<Partition> ::const_iterator _iter358;
    - for (_iter358 = this->success.begin(); _iter358 != this->success.end(); ++_iter358)
    + std::vector<Partition> ::const_iterator _iter372;
    + for (_iter372 = this->success.begin(); _iter372 != this->success.end(); ++_iter372)
            {
    - xfer += (*_iter358).write(oprot);
    + xfer += (*_iter372).write(oprot);
            }
            xfer += oprot->writeListEnd();
          }
    @@ -7816,14 +9174,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  (*(this->success)).clear();
    - uint32_t _size359;
    - ::apache::thrift::protocol::TType _etype362;
    - xfer += iprot->readListBegin(_etype362, _size359);
    - (*(this->success)).resize(_size359);
    - uint32_t _i363;
    - for (_i363 = 0; _i363 < _size359; ++_i363)
    + uint32_t _size373;
    + ::apache::thrift::protocol::TType _etype376;
    + xfer += iprot->readListBegin(_etype376, _size373);
    + (*(this->success)).resize(_size373);
    + uint32_t _i377;
    + for (_i377 = 0; _i377 < _size373; ++_i377)
                  {
    - xfer += (*(this->success))[_i363].read(iprot);
    + xfer += (*(this->success))[_i377].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -7916,14 +9274,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->group_names.clear();
    - uint32_t _size364;
    - ::apache::thrift::protocol::TType _etype367;
    - xfer += iprot->readListBegin(_etype367, _size364);
    - this->group_names.resize(_size364);
    - uint32_t _i368;
    - for (_i368 = 0; _i368 < _size364; ++_i368)
    + uint32_t _size378;
    + ::apache::thrift::protocol::TType _etype381;
    + xfer += iprot->readListBegin(_etype381, _size378);
    + this->group_names.resize(_size378);
    + uint32_t _i382;
    + for (_i382 = 0; _i382 < _size378; ++_i382)
                  {
    - xfer += iprot->readString(this->group_names[_i368]);
    + xfer += iprot->readString(this->group_names[_i382]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -7967,10 +9325,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->group_names.size()));
    - std::vector<std::string> ::const_iterator _iter369;
    - for (_iter369 = this->group_names.begin(); _iter369 != this->group_names.end(); ++_iter369)
    + std::vector<std::string> ::const_iterator _iter383;
    + for (_iter383 = this->group_names.begin(); _iter383 != this->group_names.end(); ++_iter383)
          {
    - xfer += oprot->writeString((*_iter369));
    + xfer += oprot->writeString((*_iter383));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -8004,10 +9362,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("group_names", ::apache::thrift::protocol::T_LIST, 5);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->group_names)).size()));
    - std::vector<std::string> ::const_iterator _iter370;
    - for (_iter370 = (*(this->group_names)).begin(); _iter370 != (*(this->group_names)).end(); ++_iter370)
    + std::vector<std::string> ::const_iterator _iter384;
    + for (_iter384 = (*(this->group_names)).begin(); _iter384 != (*(this->group_names)).end(); ++_iter384)
          {
    - xfer += oprot->writeString((*_iter370));
    + xfer += oprot->writeString((*_iter384));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -8042,14 +9400,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->success.clear();
    - uint32_t _size371;
    - ::apache::thrift::protocol::TType _etype374;
    - xfer += iprot->readListBegin(_etype374, _size371);
    - this->success.resize(_size371);
    - uint32_t _i375;
    - for (_i375 = 0; _i375 < _size371; ++_i375)
    + uint32_t _size385;
    + ::apache::thrift::protocol::TType _etype388;
    + xfer += iprot->readListBegin(_etype388, _size385);
    + this->success.resize(_size385);
    + uint32_t _i389;
    + for (_i389 = 0; _i389 < _size385; ++_i389)
                  {
    - xfer += this->success[_i375].read(iprot);
    + xfer += this->success[_i389].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8096,10 +9454,10 @@ uint32_t ThriftHiveMetastore_get_partiti
          xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
          {
            xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
    - std::vector<Partition> ::const_iterator _iter376;
    - for (_iter376 = this->success.begin(); _iter376 != this->success.end(); ++_iter376)
    + std::vector<Partition> ::const_iterator _iter390;
    + for (_iter390 = this->success.begin(); _iter390 != this->success.end(); ++_iter390)
            {
    - xfer += (*_iter376).write(oprot);
    + xfer += (*_iter390).write(oprot);
            }
            xfer += oprot->writeListEnd();
          }
    @@ -8142,14 +9500,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  (*(this->success)).clear();
    - uint32_t _size377;
    - ::apache::thrift::protocol::TType _etype380;
    - xfer += iprot->readListBegin(_etype380, _size377);
    - (*(this->success)).resize(_size377);
    - uint32_t _i381;
    - for (_i381 = 0; _i381 < _size377; ++_i381)
    + uint32_t _size391;
    + ::apache::thrift::protocol::TType _etype394;
    + xfer += iprot->readListBegin(_etype394, _size391);
    + (*(this->success)).resize(_size391);
    + uint32_t _i395;
    + for (_i395 = 0; _i395 < _size391; ++_i395)
                  {
    - xfer += (*(this->success))[_i381].read(iprot);
    + xfer += (*(this->success))[_i395].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8308,14 +9666,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->success.clear();
    - uint32_t _size382;
    - ::apache::thrift::protocol::TType _etype385;
    - xfer += iprot->readListBegin(_etype385, _size382);
    - this->success.resize(_size382);
    - uint32_t _i386;
    - for (_i386 = 0; _i386 < _size382; ++_i386)
    + uint32_t _size396;
    + ::apache::thrift::protocol::TType _etype399;
    + xfer += iprot->readListBegin(_etype399, _size396);
    + this->success.resize(_size396);
    + uint32_t _i400;
    + for (_i400 = 0; _i400 < _size396; ++_i400)
                  {
    - xfer += iprot->readString(this->success[_i386]);
    + xfer += iprot->readString(this->success[_i400]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8354,10 +9712,10 @@ uint32_t ThriftHiveMetastore_get_partiti
          xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
          {
            xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
    - std::vector<std::string> ::const_iterator _iter387;
    - for (_iter387 = this->success.begin(); _iter387 != this->success.end(); ++_iter387)
    + std::vector<std::string> ::const_iterator _iter401;
    + for (_iter401 = this->success.begin(); _iter401 != this->success.end(); ++_iter401)
            {
    - xfer += oprot->writeString((*_iter387));
    + xfer += oprot->writeString((*_iter401));
            }
            xfer += oprot->writeListEnd();
          }
    @@ -8396,14 +9754,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  (*(this->success)).clear();
    - uint32_t _size388;
    - ::apache::thrift::protocol::TType _etype391;
    - xfer += iprot->readListBegin(_etype391, _size388);
    - (*(this->success)).resize(_size388);
    - uint32_t _i392;
    - for (_i392 = 0; _i392 < _size388; ++_i392)
    + uint32_t _size402;
    + ::apache::thrift::protocol::TType _etype405;
    + xfer += iprot->readListBegin(_etype405, _size402);
    + (*(this->success)).resize(_size402);
    + uint32_t _i406;
    + for (_i406 = 0; _i406 < _size402; ++_i406)
                  {
    - xfer += iprot->readString((*(this->success))[_i392]);
    + xfer += iprot->readString((*(this->success))[_i406]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8472,14 +9830,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->part_vals.clear();
    - uint32_t _size393;
    - ::apache::thrift::protocol::TType _etype396;
    - xfer += iprot->readListBegin(_etype396, _size393);
    - this->part_vals.resize(_size393);
    - uint32_t _i397;
    - for (_i397 = 0; _i397 < _size393; ++_i397)
    + uint32_t _size407;
    + ::apache::thrift::protocol::TType _etype410;
    + xfer += iprot->readListBegin(_etype410, _size407);
    + this->part_vals.resize(_size407);
    + uint32_t _i411;
    + for (_i411 = 0; _i411 < _size407; ++_i411)
                  {
    - xfer += iprot->readString(this->part_vals[_i397]);
    + xfer += iprot->readString(this->part_vals[_i411]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8523,10 +9881,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->part_vals.size()));
    - std::vector<std::string> ::const_iterator _iter398;
    - for (_iter398 = this->part_vals.begin(); _iter398 != this->part_vals.end(); ++_iter398)
    + std::vector<std::string> ::const_iterator _iter412;
    + for (_iter412 = this->part_vals.begin(); _iter412 != this->part_vals.end(); ++_iter412)
          {
    - xfer += oprot->writeString((*_iter398));
    + xfer += oprot->writeString((*_iter412));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -8556,10 +9914,10 @@ uint32_t ThriftHiveMetastore_get_partiti
        xfer += oprot->writeFieldBegin("part_vals", ::apache::thrift::protocol::T_LIST, 3);
        {
          xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>((*(this->part_vals)).size()));
    - std::vector<std::string> ::const_iterator _iter399;
    - for (_iter399 = (*(this->part_vals)).begin(); _iter399 != (*(this->part_vals)).end(); ++_iter399)
    + std::vector<std::string> ::const_iterator _iter413;
    + for (_iter413 = (*(this->part_vals)).begin(); _iter413 != (*(this->part_vals)).end(); ++_iter413)
          {
    - xfer += oprot->writeString((*_iter399));
    + xfer += oprot->writeString((*_iter413));
          }
          xfer += oprot->writeListEnd();
        }
    @@ -8598,14 +9956,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->success.clear();
    - uint32_t _size400;
    - ::apache::thrift::protocol::TType _etype403;
    - xfer += iprot->readListBegin(_etype403, _size400);
    - this->success.resize(_size400);
    - uint32_t _i404;
    - for (_i404 = 0; _i404 < _size400; ++_i404)
    + uint32_t _size414;
    + ::apache::thrift::protocol::TType _etype417;
    + xfer += iprot->readListBegin(_etype417, _size414);
    + this->success.resize(_size414);
    + uint32_t _i418;
    + for (_i418 = 0; _i418 < _size414; ++_i418)
                  {
    - xfer += this->success[_i404].read(iprot);
    + xfer += this->success[_i418].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8652,10 +10010,10 @@ uint32_t ThriftHiveMetastore_get_partiti
          xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
          {
            xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->success.size()));
    - std::vector<Partition> ::const_iterator _iter405;
    - for (_iter405 = this->success.begin(); _iter405 != this->success.end(); ++_iter405)
    + std::vector<Partition> ::const_iterator _iter419;
    + for (_iter419 = this->success.begin(); _iter419 != this->success.end(); ++_iter419)
            {
    - xfer += (*_iter405).write(oprot);
    + xfer += (*_iter419).write(oprot);
            }
            xfer += oprot->writeListEnd();
          }
    @@ -8698,14 +10056,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  (*(this->success)).clear();
    - uint32_t _size406;
    - ::apache::thrift::protocol::TType _etype409;
    - xfer += iprot->readListBegin(_etype409, _size406);
    - (*(this->success)).resize(_size406);
    - uint32_t _i410;
    - for (_i410 = 0; _i410 < _size406; ++_i410)
    + uint32_t _size420;
    + ::apache::thrift::protocol::TType _etype423;
    + xfer += iprot->readListBegin(_etype423, _size420);
    + (*(this->success)).resize(_size420);
    + uint32_t _i424;
    + for (_i424 = 0; _i424 < _size420; ++_i424)
                  {
    - xfer += (*(this->success))[_i410].read(iprot);
    + xfer += (*(this->success))[_i424].read(iprot);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8782,14 +10140,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->part_vals.clear();
    - uint32_t _size411;
    - ::apache::thrift::protocol::TType _etype414;
    - xfer += iprot->readListBegin(_etype414, _size411);
    - this->part_vals.resize(_size411);
    - uint32_t _i415;
    - for (_i415 = 0; _i415 < _size411; ++_i415)
    + uint32_t _size425;
    + ::apache::thrift::protocol::TType _etype428;
    + xfer += iprot->readListBegin(_etype428, _size425);
    + this->part_vals.resize(_size425);
    + uint32_t _i429;
    + for (_i429 = 0; _i429 < _size425; ++_i429)
                  {
    - xfer += iprot->readString(this->part_vals[_i415]);
    + xfer += iprot->readString(this->part_vals[_i429]);
                  }
                  xfer += iprot->readListEnd();
                }
    @@ -8818,14 +10176,14 @@ uint32_t ThriftHiveMetastore_get_partiti
              if (ftype == ::apache::thrift::protocol::T_LIST) {
                {
                  this->group_names.clear();
    - uint32_t _size416;

    [... 2115 lines stripped ...]
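
The generated C++ above only handles (de)serialization of the new *_with_environment_context calls; applications reach them through the language clients. As a rough, hypothetical sketch of what the new drop_partition_with_environment_context round trip looks like from Java (signature per the Iface additions below; the endpoint, table, partition values, and property key are placeholders, and it assumes the generated EnvironmentContext bean exposes its map field via putToProperties, as Thrift-generated beans normally do):

    import java.util.Arrays;

    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TSocket;

    public class DropPartitionWithContextSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder endpoint; point this at a real metastore Thrift port.
        TSocket transport = new TSocket("localhost", 9083);
        transport.open();
        ThriftHiveMetastore.Client client =
            new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

        // The environment context carries free-form key/value properties for the
        // metastore (assumption: the generated bean exposes its map<string,string>
        // field via putToProperties, as Thrift map fields normally do).
        EnvironmentContext ctx = new EnvironmentContext();
        ctx.putToProperties("SOURCE", "example-client");  // hypothetical property

        // New call from this patch: drop_partition's arguments plus the trailing
        // EnvironmentContext (field id 5 in the generated args struct above).
        boolean dropped = client.drop_partition_with_environment_context(
            "default", "sales", Arrays.asList("2013", "02"), true, ctx);
        System.out.println("dropped: " + dropped);

        transport.close();
      }
    }
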
    Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Tue Feb 12 18:52:55 2013
    @@ -68,6 +68,8 @@ public class ThriftHiveMetastore {

          public void drop_table(String dbname, String name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

    + public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
    +
          public List<String> get_tables(String db_name, String pattern) throws MetaException, org.apache.thrift.TException;

          public List<String> get_all_tables(String db_name) throws MetaException, org.apache.thrift.TException;
    @@ -90,12 +92,20 @@ public class ThriftHiveMetastore {

          public Partition append_partition(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;

    + public Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;
    +
          public Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;

    + public Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;
    +
          public boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

    + public boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
    +
          public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

    + public boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
    +
          public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, org.apache.thrift.TException;

          public Partition get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names) throws MetaException, NoSuchObjectException, org.apache.thrift.TException;
    @@ -222,6 +232,8 @@ public class ThriftHiveMetastore {

          public void drop_table(String dbname, String name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_table_call> resultHandler) throws org.apache.thrift.TException;

    + public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_table_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
    +
          public void get_tables(String db_name, String pattern, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_tables_call> resultHandler) throws org.apache.thrift.TException;

          public void get_all_tables(String db_name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_all_tables_call> resultHandler) throws org.apache.thrift.TException;
    @@ -244,12 +256,20 @@ public class ThriftHiveMetastore {

          public void append_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_call> resultHandler) throws org.apache.thrift.TException;

    + public void append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
    +
          public void append_partition_by_name(String db_name, String tbl_name, String part_name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_by_name_call> resultHandler) throws org.apache.thrift.TException;

    + public void append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
    +
          public void drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_call> resultHandler) throws org.apache.thrift.TException;

    + public void drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
    +
          public void drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_by_name_call> resultHandler) throws org.apache.thrift.TException;

    + public void drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
    +
          public void get_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_partition_call> resultHandler) throws org.apache.thrift.TException;

          public void get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_partition_with_auth_call> resultHandler) throws org.apache.thrift.TException;
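
The asynchronous variants mirror the blocking ones but deliver results through a callback typed on the generated *_call class; the callback unwraps the outcome with getResult(), as the drop_table_with_environment_context_call class further down does. A hedged sketch follows, assuming the Thrift 0.9-era AsyncMethodCallback interface (onComplete/onError) and an illustrative endpoint.

    // Hypothetical async usage sketch; endpoint, property key, and the sleep-based
    // wait are assumptions made only for illustration.
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.AsyncClient.drop_table_with_environment_context_call;
    import org.apache.thrift.async.AsyncMethodCallback;
    import org.apache.thrift.async.TAsyncClientManager;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TNonblockingSocket;

    public class EnvContextAsyncSketch {
      public static void main(String[] args) throws Exception {
        ThriftHiveMetastore.AsyncClient client = new ThriftHiveMetastore.AsyncClient(
            new TBinaryProtocol.Factory(),
            new TAsyncClientManager(),
            new TNonblockingSocket("localhost", 9083));     // assumed endpoint

        EnvironmentContext ctx = new EnvironmentContext();
        ctx.putToProperties("source", "example");           // assumed property key

        client.drop_table_with_environment_context("default", "my_table", true, ctx,
            new AsyncMethodCallback<drop_table_with_environment_context_call>() {
              public void onComplete(drop_table_with_environment_context_call response) {
                try {
                  response.getResult();                     // void; rethrows server-side exceptions
                  System.out.println("table dropped");
                } catch (Exception e) {
                  e.printStackTrace();
                }
              }
              public void onError(Exception exception) {
                exception.printStackTrace();
              }
            });

        Thread.sleep(2000);  // crude wait for the callback in this sketch
      }
    }
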
    @@ -806,6 +826,35 @@ public class ThriftHiveMetastore {
            return;
          }

    + public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + send_drop_table_with_environment_context(dbname, name, deleteData, environment_context);
    + recv_drop_table_with_environment_context();
    + }
    +
    + public void send_drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
    + {
    + drop_table_with_environment_context_args args = new drop_table_with_environment_context_args();
    + args.setDbname(dbname);
    + args.setName(name);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + sendBase("drop_table_with_environment_context", args);
    + }
    +
    + public void recv_drop_table_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + drop_table_with_environment_context_result result = new drop_table_with_environment_context_result();
    + receiveBase(result, "drop_table_with_environment_context");
    + if (result.o1 != null) {
    + throw result.o1;
    + }
    + if (result.o3 != null) {
    + throw result.o3;
    + }
    + return;
    + }
    +
          public List<String> get_tables(String db_name, String pattern) throws MetaException, org.apache.thrift.TException
          {
            send_get_tables(db_name, pattern);
    @@ -1144,6 +1193,41 @@ public class ThriftHiveMetastore {
            throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition failed: unknown result");
          }

    + public Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
    + {
    + send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context);
    + return recv_append_partition_with_environment_context();
    + }
    +
    + public void send_append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws org.apache.thrift.TException
    + {
    + append_partition_with_environment_context_args args = new append_partition_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_vals(part_vals);
    + args.setEnvironment_context(environment_context);
    + sendBase("append_partition_with_environment_context", args);
    + }
    +
    + public Partition recv_append_partition_with_environment_context() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
    + {
    + append_partition_with_environment_context_result result = new append_partition_with_environment_context_result();
    + receiveBase(result, "append_partition_with_environment_context");
    + if (result.isSetSuccess()) {
    + return result.success;
    + }
    + if (result.o1 != null) {
    + throw result.o1;
    + }
    + if (result.o2 != null) {
    + throw result.o2;
    + }
    + if (result.o3 != null) {
    + throw result.o3;
    + }
    + throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result");
    + }
    +
          public Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
          {
            send_append_partition_by_name(db_name, tbl_name, part_name);
    @@ -1178,6 +1262,41 @@ public class ThriftHiveMetastore {
            throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result");
          }

    + public Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
    + {
    + send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context);
    + return recv_append_partition_by_name_with_environment_context();
    + }
    +
    + public void send_append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws org.apache.thrift.TException
    + {
    + append_partition_by_name_with_environment_context_args args = new append_partition_by_name_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_name(part_name);
    + args.setEnvironment_context(environment_context);
    + sendBase("append_partition_by_name_with_environment_context", args);
    + }
    +
    + public Partition recv_append_partition_by_name_with_environment_context() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
    + {
    + append_partition_by_name_with_environment_context_result result = new append_partition_by_name_with_environment_context_result();
    + receiveBase(result, "append_partition_by_name_with_environment_context");
    + if (result.isSetSuccess()) {
    + return result.success;
    + }
    + if (result.o1 != null) {
    + throw result.o1;
    + }
    + if (result.o2 != null) {
    + throw result.o2;
    + }
    + if (result.o3 != null) {
    + throw result.o3;
    + }
    + throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result");
    + }
    +
          public boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
          {
            send_drop_partition(db_name, tbl_name, part_vals, deleteData);
    @@ -1210,6 +1329,39 @@ public class ThriftHiveMetastore {
            throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result");
          }

    + public boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context);
    + return recv_drop_partition_with_environment_context();
    + }
    +
    + public void send_drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
    + {
    + drop_partition_with_environment_context_args args = new drop_partition_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_vals(part_vals);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + sendBase("drop_partition_with_environment_context", args);
    + }
    +
    + public boolean recv_drop_partition_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + drop_partition_with_environment_context_result result = new drop_partition_with_environment_context_result();
    + receiveBase(result, "drop_partition_with_environment_context");
    + if (result.isSetSuccess()) {
    + return result.success;
    + }
    + if (result.o1 != null) {
    + throw result.o1;
    + }
    + if (result.o2 != null) {
    + throw result.o2;
    + }
    + throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result");
    + }
    +
          public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
          {
            send_drop_partition_by_name(db_name, tbl_name, part_name, deleteData);
    @@ -1242,6 +1394,39 @@ public class ThriftHiveMetastore {
            throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
          }

    + public boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context);
    + return recv_drop_partition_by_name_with_environment_context();
    + }
    +
    + public void send_drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
    + {
    + drop_partition_by_name_with_environment_context_args args = new drop_partition_by_name_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_name(part_name);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + sendBase("drop_partition_by_name_with_environment_context", args);
    + }
    +
    + public boolean recv_drop_partition_by_name_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
    + {
    + drop_partition_by_name_with_environment_context_result result = new drop_partition_by_name_with_environment_context_result();
    + receiveBase(result, "drop_partition_by_name_with_environment_context");
    + if (result.isSetSuccess()) {
    + return result.success;
    + }
    + if (result.o1 != null) {
    + throw result.o1;
    + }
    + if (result.o2 != null) {
    + throw result.o2;
    + }
    + throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result");
    + }
    +
          public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, org.apache.thrift.TException
          {
            send_get_partition(db_name, tbl_name, part_vals);
    @@ -3165,6 +3350,47 @@ public class ThriftHiveMetastore {
            }
          }

    + public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_table_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
    + checkReady();
    + drop_table_with_environment_context_call method_call = new drop_table_with_environment_context_call(dbname, name, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
    + this.___currentMethod = method_call;
    + ___manager.call(method_call);
    + }
    +
    + public static class drop_table_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
    + private String dbname;
    + private String name;
    + private boolean deleteData;
    + private EnvironmentContext environment_context;
    + public drop_table_with_environment_context_call(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_table_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
    + super(client, protocolFactory, transport, resultHandler, false);
    + this.dbname = dbname;
    + this.name = name;
    + this.deleteData = deleteData;
    + this.environment_context = environment_context;
    + }
    +
    + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
    + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_table_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
    + drop_table_with_environment_context_args args = new drop_table_with_environment_context_args();
    + args.setDbname(dbname);
    + args.setName(name);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + args.write(prot);
    + prot.writeMessageEnd();
    + }
    +
    + public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
    + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
    + throw new IllegalStateException("Method call not finished!");
    + }
    + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
    + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
    + (new Client(prot)).recv_drop_table_with_environment_context();
    + }
    + }
    +
          public void get_tables(String db_name, String pattern, org.apache.thrift.async.AsyncMethodCallback<get_tables_call> resultHandler) throws org.apache.thrift.TException {
            checkReady();
            get_tables_call method_call = new get_tables_call(db_name, pattern, resultHandler, this, ___protocolFactory, ___transport);
    @@ -3556,6 +3782,47 @@ public class ThriftHiveMetastore {
            }
          }

    + public void append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
    + checkReady();
    + append_partition_with_environment_context_call method_call = new append_partition_with_environment_context_call(db_name, tbl_name, part_vals, environment_context, resultHandler, this, ___protocolFactory, ___transport);
    + this.___currentMethod = method_call;
    + ___manager.call(method_call);
    + }
    +
    + public static class append_partition_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
    + private String db_name;
    + private String tbl_name;
    + private List<String> part_vals;
    + private EnvironmentContext environment_context;
    + public append_partition_with_environment_context_call(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
    + super(client, protocolFactory, transport, resultHandler, false);
    + this.db_name = db_name;
    + this.tbl_name = tbl_name;
    + this.part_vals = part_vals;
    + this.environment_context = environment_context;
    + }
    +
    + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
    + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("append_partition_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
    + append_partition_with_environment_context_args args = new append_partition_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_vals(part_vals);
    + args.setEnvironment_context(environment_context);
    + args.write(prot);
    + prot.writeMessageEnd();
    + }
    +
    + public Partition getResult() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException {
    + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
    + throw new IllegalStateException("Method call not finished!");
    + }
    + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
    + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
    + return (new Client(prot)).recv_append_partition_with_environment_context();
    + }
    + }
    +
          public void append_partition_by_name(String db_name, String tbl_name, String part_name, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_call> resultHandler) throws org.apache.thrift.TException {
            checkReady();
            append_partition_by_name_call method_call = new append_partition_by_name_call(db_name, tbl_name, part_name, resultHandler, this, ___protocolFactory, ___transport);
    @@ -3594,6 +3861,47 @@ public class ThriftHiveMetastore {
            }
          }

    + public void append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
    + checkReady();
    + append_partition_by_name_with_environment_context_call method_call = new append_partition_by_name_with_environment_context_call(db_name, tbl_name, part_name, environment_context, resultHandler, this, ___protocolFactory, ___transport);
    + this.___currentMethod = method_call;
    + ___manager.call(method_call);
    + }
    +
    + public static class append_partition_by_name_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
    + private String db_name;
    + private String tbl_name;
    + private String part_name;
    + private EnvironmentContext environment_context;
    + public append_partition_by_name_with_environment_context_call(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
    + super(client, protocolFactory, transport, resultHandler, false);
    + this.db_name = db_name;
    + this.tbl_name = tbl_name;
    + this.part_name = part_name;
    + this.environment_context = environment_context;
    + }
    +
    + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
    + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("append_partition_by_name_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
    + append_partition_by_name_with_environment_context_args args = new append_partition_by_name_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_name(part_name);
    + args.setEnvironment_context(environment_context);
    + args.write(prot);
    + prot.writeMessageEnd();
    + }
    +
    + public Partition getResult() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException {
    + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
    + throw new IllegalStateException("Method call not finished!");
    + }
    + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
    + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
    + return (new Client(prot)).recv_append_partition_by_name_with_environment_context();
    + }
    + }
    +
          public void drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<drop_partition_call> resultHandler) throws org.apache.thrift.TException {
            checkReady();
            drop_partition_call method_call = new drop_partition_call(db_name, tbl_name, part_vals, deleteData, resultHandler, this, ___protocolFactory, ___transport);
    @@ -3635,6 +3943,50 @@ public class ThriftHiveMetastore {
            }
          }

    + public void drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
    + checkReady();
    + drop_partition_with_environment_context_call method_call = new drop_partition_with_environment_context_call(db_name, tbl_name, part_vals, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
    + this.___currentMethod = method_call;
    + ___manager.call(method_call);
    + }
    +
    + public static class drop_partition_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
    + private String db_name;
    + private String tbl_name;
    + private List<String> part_vals;
    + private boolean deleteData;
    + private EnvironmentContext environment_context;
    + public drop_partition_with_environment_context_call(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
    + super(client, protocolFactory, transport, resultHandler, false);
    + this.db_name = db_name;
    + this.tbl_name = tbl_name;
    + this.part_vals = part_vals;
    + this.deleteData = deleteData;
    + this.environment_context = environment_context;
    + }
    +
    + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
    + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_partition_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
    + drop_partition_with_environment_context_args args = new drop_partition_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_vals(part_vals);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + args.write(prot);
    + prot.writeMessageEnd();
    + }
    +
    + public boolean getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
    + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
    + throw new IllegalStateException("Method call not finished!");
    + }
    + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
    + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
    + return (new Client(prot)).recv_drop_partition_with_environment_context();
    + }
    + }
    +
          public void drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_call> resultHandler) throws org.apache.thrift.TException {
            checkReady();
            drop_partition_by_name_call method_call = new drop_partition_by_name_call(db_name, tbl_name, part_name, deleteData, resultHandler, this, ___protocolFactory, ___transport);
    @@ -3676,6 +4028,50 @@ public class ThriftHiveMetastore {
            }
          }

    + public void drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
    + checkReady();
    + drop_partition_by_name_with_environment_context_call method_call = new drop_partition_by_name_with_environment_context_call(db_name, tbl_name, part_name, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
    + this.___currentMethod = method_call;
    + ___manager.call(method_call);
    + }
    +
    + public static class drop_partition_by_name_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
    + private String db_name;
    + private String tbl_name;
    + private String part_name;
    + private boolean deleteData;
    + private EnvironmentContext environment_context;
    + public drop_partition_by_name_with_environment_context_call(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
    + super(client, protocolFactory, transport, resultHandler, false);
    + this.db_name = db_name;
    + this.tbl_name = tbl_name;
    + this.part_name = part_name;
    + this.deleteData = deleteData;
    + this.environment_context = environment_context;
    + }
    +
    + public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
    + prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_partition_by_name_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
    + drop_partition_by_name_with_environment_context_args args = new drop_partition_by_name_with_environment_context_args();
    + args.setDb_name(db_name);
    + args.setTbl_name(tbl_name);
    + args.setPart_name(part_name);
    + args.setDeleteData(deleteData);
    + args.setEnvironment_context(environment_context);
    + args.write(prot);
    + prot.writeMessageEnd();
    + }
    +
    + public boolean getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
    + if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
    + throw new IllegalStateException("Method call not finished!");
    + }
    + org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
    + org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
    + return (new Client(prot)).recv_drop_partition_by_name_with_environment_context();
    + }
    + }
    +
          public void get_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<get_partition_call> resultHandler) throws org.apache.thrift.TException {
            checkReady();
            get_partition_call method_call = new get_partition_call(db_name, tbl_name, part_vals, resultHandler, this, ___protocolFactory, ___transport);
    @@ -5431,6 +5827,7 @@ public class ThriftHiveMetastore {
            processMap.put("create_table", new create_table());
            processMap.put("create_table_with_environment_context", new create_table_with_environment_context());
            processMap.put("drop_table", new drop_table());
    + processMap.put("drop_table_with_environment_context", new drop_table_with_environment_context());
            processMap.put("get_tables", new get_tables());
            processMap.put("get_all_tables", new get_all_tables());
            processMap.put("get_table", new get_table());
    @@ -5442,9 +5839,13 @@ public class ThriftHiveMetastore {
            processMap.put("add_partition_with_environment_context", new add_partition_with_environment_context());
            processMap.put("add_partitions", new add_partitions());
            processMap.put("append_partition", new append_partition());
    + processMap.put("append_partition_with_environment_context", new append_partition_with_environment_context());
            processMap.put("append_partition_by_name", new append_partition_by_name());
    + processMap.put("append_partition_by_name_with_environment_context", new append_partition_by_name_with_environment_context());
            processMap.put("drop_partition", new drop_partition());
    + processMap.put("drop_partition_with_environment_context", new drop_partition_with_environment_context());
            processMap.put("drop_partition_by_name", new drop_partition_by_name());
    + processMap.put("drop_partition_by_name_with_environment_context", new drop_partition_by_name_with_environment_context());
            processMap.put("get_partition", new get_partition());
            processMap.put("get_partition_with_auth", new get_partition_with_auth());
            processMap.put("get_partition_by_name", new get_partition_by_name());
    @@ -5898,6 +6299,32 @@ public class ThriftHiveMetastore {
            }
          }

    + public static class drop_table_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_table_with_environment_context_args> {
    + public drop_table_with_environment_context() {
    + super("drop_table_with_environment_context");
    + }
    +
    + public drop_table_with_environment_context_args getEmptyArgsInstance() {
    + return new drop_table_with_environment_context_args();
    + }
    +
    + protected boolean isOneway() {
    + return false;
    + }
    +
    + public drop_table_with_environment_context_result getResult(I iface, drop_table_with_environment_context_args args) throws org.apache.thrift.TException {
    + drop_table_with_environment_context_result result = new drop_table_with_environment_context_result();
    + try {
    + iface.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context);
    + } catch (NoSuchObjectException o1) {
    + result.o1 = o1;
    + } catch (MetaException o3) {
    + result.o3 = o3;
    + }
    + return result;
    + }
    + }
    +
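
As the getResult implementation above shows, a declared service exception never escapes as a transport failure: NoSuchObjectException and MetaException are caught into the result struct on the server, and the client-side recv_* method rethrows whichever field is populated. The toy below (not Thrift-generated, purely illustrative) condenses that round trip into a few self-contained lines.

    // Toy illustration of the catch-into-result / rethrow-on-receive pattern used by
    // the generated drop_table_with_environment_context server and client stubs.
    public class ExceptionMappingSketch {

      static class NoSuchObjectException extends Exception {
        NoSuchObjectException(String msg) { super(msg); }
      }

      // Stand-in for drop_table_with_environment_context_result: o1 carries the
      // declared exception back to the caller instead of failing the RPC outright.
      static class Result {
        NoSuchObjectException o1;
      }

      // Server side: what ProcessFunction.getResult() does with a handler exception.
      static Result process(String table) {
        Result result = new Result();
        try {
          if (!"known_table".equals(table)) {
            throw new NoSuchObjectException(table + " does not exist");
          }
          // handler work would happen here
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        }
        return result;
      }

      // Client side: what recv_drop_table_with_environment_context() does.
      static void recv(Result result) throws NoSuchObjectException {
        if (result.o1 != null) {
          throw result.o1;
        }
      }

      public static void main(String[] args) {
        try {
          recv(process("missing_table"));
        } catch (NoSuchObjectException e) {
          System.out.println("client sees: " + e.getMessage());
        }
      }
    }
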
          public static class get_tables<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_tables_args> {
            public get_tables() {
              super("get_tables");
    @@ -6193,6 +6620,34 @@ public class ThriftHiveMetastore {
            }
          }

    + public static class append_partition_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_with_environment_context_args> {
    + public append_partition_with_environment_context() {
    + super("append_partition_with_environment_context");
    + }
    +
    + public append_partition_with_environment_context_args getEmptyArgsInstance() {
    + return new append_partition_with_environment_context_args();
    + }
    +
    + protected boolean isOneway() {
    + return false;
    + }
    +
    + public append_partition_with_environment_context_result getResult(I iface, append_partition_with_environment_context_args args) throws org.apache.thrift.TException {
    + append_partition_with_environment_context_result result = new append_partition_with_environment_context_result();
    + try {
    + result.success = iface.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context);
    + } catch (InvalidObjectException o1) {
    + result.o1 = o1;
    + } catch (AlreadyExistsException o2) {
    + result.o2 = o2;
    + } catch (MetaException o3) {
    + result.o3 = o3;
    + }
    + return result;
    + }
    + }
    +
          public static class append_partition_by_name<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_by_name_args> {
            public append_partition_by_name() {
              super("append_partition_by_name");
    @@ -6221,6 +6676,34 @@ public class ThriftHiveMetastore {
            }
          }

    + public static class append_partition_by_name_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_by_name_with_environment_context_args> {
    + public append_partition_by_name_with_environment_context() {
    + super("append_partition_by_name_with_environment_context");
    + }
    +
    + public append_partition_by_name_with_environment_context_args getEmptyArgsInstance() {
    + return new append_partition_by_name_with_environment_context_args();
    + }
    +
    + protected boolean isOneway() {
    + return false;
    + }
    +
    + public append_partition_by_name_with_environment_context_result getResult(I iface, append_partition_by_name_with_environment_context_args args) throws org.apache.thrift.TException {
    + append_partition_by_name_with_environment_context_result result = new append_partition_by_name_with_environment_context_result();
    + try {
    + result.success = iface.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context);
    + } catch (InvalidObjectException o1) {
    + result.o1 = o1;
    + } catch (AlreadyExistsException o2) {
    + result.o2 = o2;
    + } catch (MetaException o3) {
    + result.o3 = o3;
    + }
    + return result;
    + }
    + }
    +
          public static class drop_partition<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_args> {
            public drop_partition() {
              super("drop_partition");
    @@ -6248,6 +6731,33 @@ public class ThriftHiveMetastore {
            }
          }

    + public static class drop_partition_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_with_environment_context_args> {
    + public drop_partition_with_environment_context() {
    + super("drop_partition_with_environment_context");
    + }
    +
    + public drop_partition_with_environment_context_args getEmptyArgsInstance() {
    + return new drop_partition_with_environment_context_args();
    + }
    +
    + protected boolean isOneway() {
    + return false;
    + }
    +
    + public drop_partition_with_environment_context_result getResult(I iface, drop_partition_with_environment_context_args args) throws org.apache.thrift.TException {
    + drop_partition_with_environment_context_result result = new drop_partition_with_environment_context_result();
    + try {
    + result.success = iface.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context);
    + result.setSuccessIsSet(true);
    + } catch (NoSuchObjectException o1) {
    + result.o1 = o1;
    + } catch (MetaException o2) {
    + result.o2 = o2;
    + }
    + return result;
    + }
    + }
    +
          public static class drop_partition_by_name<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_by_name_args> {
            public drop_partition_by_name() {
              super("drop_partition_by_name");
    @@ -6275,6 +6785,33 @@ public class ThriftHiveMetastore {
            }
          }

    + public static class drop_partition_by_name_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_by_name_with_environment_context_args> {
    + public drop_partition_by_name_with_environment_context() {
    + super("drop_partition_by_name_with_environment_context");
    + }
    +
    + public drop_partition_by_name_with_environment_context_args getEmptyArgsInstance() {
    + return new drop_partition_by_name_with_environment_context_args();
    + }
    +
    + protected boolean isOneway() {
    + return false;
    + }
    +
    + public drop_partition_by_name_with_environment_context_result getResult(I iface, drop_partition_by_name_with_environment_context_args args) throws org.apache.thrift.TException {
    + drop_partition_by_name_with_environment_context_result result = new drop_partition_by_name_with_environment_context_result();
    + try {
    + result.success = iface.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context);
    + result.setSuccessIsSet(true);
    + } catch (NoSuchObjectException o1) {
    + result.o1 = o1;
    + } catch (MetaException o2) {
    + result.o2 = o2;
    + }
    + return result;
    + }
    + }
    +
          public static class get_partition<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_partition_args> {
            public get_partition() {
              super("get_partition");
    @@ -16721,7 +17258,7 @@ public class ThriftHiveMetastore {
                        for (int _i273 = 0; _i273 < _map272.size; ++_i273)
                        {
                          String _key274; // required
    - Type _val275; // optional
    + Type _val275; // required
                          _key274 = iprot.readString();
                          _val275 = new Type();
                          _val275.read(iprot);
    @@ -16825,7 +17362,7 @@ public class ThriftHiveMetastore {
                  for (int _i279 = 0; _i279 < _map278.size; ++_i279)
                  {
                    String _key280; // required
    - Type _val281; // optional
    + Type _val281; // required
                    _key280 = iprot.readString();
                    _val281 = new Type();
                    _val281.read(iprot);
    @@ -22449,25 +22986,31 @@ public class ThriftHiveMetastore {

        }

    - public static class get_tables_args implements org.apache.thrift.TBase<get_tables_args, get_tables_args._Fields>, java.io.Serializable, Cloneable {
    - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_tables_args");
    + public static class drop_table_with_environment_context_args implements org.apache.thrift.TBase<drop_table_with_environment_context_args, drop_table_with_environment_context_args._Fields>, java.io.Serializable, Cloneable {
    + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("drop_table_with_environment_context_args");

    - private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("db_name", org.apache.thrift.protocol.TType.STRING, (short)1);
    - private static final org.apache.thrift.protocol.TField PATTERN_FIELD_DESC = new org.apache.thrift.protocol.TField("pattern", org.apache.thrift.protocol.TType.STRING, (short)2);
    + private static final org.apache.thrift.protocol.TField DBNAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbname", org.apache.thrift.protocol.TType.STRING, (short)1);
    + private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)2);
    + private static final org.apache.thrift.protocol.TField DELETE_DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("deleteData", org.apache.thrift.protocol.TType.BOOL, (short)3);
    + private static final org.apache.thrift.protocol.TField ENVIRONMENT_CONTEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("environment_context", org.apache.thrift.protocol.TType.STRUCT, (short)4);

          private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
          static {
    - schemes.put(StandardScheme.class, new get_tables_argsStandardSchemeFactory());
    - schemes.put(TupleScheme.class, new get_tables_argsTupleSchemeFactory());
    + schemes.put(StandardScheme.class, new drop_table_with_environment_context_argsStandardSchemeFactory());
    + schemes.put(TupleScheme.class, new drop_table_with_environment_context_argsTupleSchemeFactory());
          }

    - private String db_name; // required
    - private String pattern; // required
    + private String dbname; // required
    + private String name; // required
    + private boolean deleteData; // required
    + private EnvironmentContext environment_context; // required

          /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
          public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    - DB_NAME((short)1, "db_name"),
    - PATTERN((short)2, "pattern");
    + DBNAME((short)1, "dbname"),
    + NAME((short)2, "name"),
    + DELETE_DATA((short)3, "deleteData"),
    + ENVIRONMENT_CONTEXT((short)4, "environment_context");

            private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    @@ -22482,10 +23025,14 @@ public class ThriftHiveMetastore {
             */
            public static _Fields findByThriftId(int fieldId) {
              switch(fieldId) {
    - case 1: // DB_NAME
    - return DB_NAME;
    - case 2: // PATTERN
    - return PATTERN;
    + case 1: // DBNAME
    + return DBNAME;
    + case 2: // NAME
    + return NAME;
    + case 3: // DELETE_DATA
    + return DELETE_DATA;
    + case 4: // ENVIRONMENT_CONTEXT
    + return ENVIRONMENT_CONTEXT;
                default:
                  return null;
              }
    @@ -22526,112 +23073,192 @@ public class ThriftHiveMetastore {
          }

          // isset id assignments
    + private static final int __DELETEDATA_ISSET_ID = 0;
    + private byte __isset_bitfield = 0;
          public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
          static {
            Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    - tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("db_name", org.apache.thrift.TFieldRequirementType.DEFAULT,
    + tmpMap.put(_Fields.DBNAME, new org.apache.thrift.meta_data.FieldMetaData("dbname", org.apache.thrift.TFieldRequirementType.DEFAULT,
                new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    - tmpMap.put(_Fields.PATTERN, new org.apache.thrift.meta_data.FieldMetaData("pattern", org.apache.thrift.TFieldRequirementType.DEFAULT,
    + tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT,
                new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    + tmpMap.put(_Fields.DELETE_DATA, new org.apache.thrift.meta_data.FieldMetaData("deleteData", org.apache.thrift.TFieldRequirementType.DEFAULT,
    + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    + tmpMap.put(_Fields.ENVIRONMENT_CONTEXT, new org.apache.thrift.meta_data.FieldMetaData("environment_context", org.apache.thrift.TFieldRequirementType.DEFAULT,
    + new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, EnvironmentContext.class)));
            metaDataMap = Collections.unmodifiableMap(tmpMap);
    - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_tables_args.class, metaDataMap);
    + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(drop_table_with_environment_context_args.class, metaDataMap);
          }

    - public get_tables_args() {
    + public drop_table_with_environment_context_args() {
          }

    - public get_tables_args(
    - String db_name,
    - String pattern)
    + public drop_table_with_environment_context_args(
    + String dbname,
    + String name,
    + boolean deleteData,
    + EnvironmentContext environment_context)
          {
            this();
    - this.db_name = db_name;
    - this.pattern = pattern;
    + this.dbname = dbname;
    + this.name = name;
    + this.deleteData = deleteData;
    + setDeleteDataIsSet(true);
    + this.environment_context = environment_context;
          }

          /**
           * Performs a deep copy on <i>other</i>.
           */
    - public get_tables_args(get_tables_args other) {
    - if (other.isSetDb_name()) {
    - this.db_name = other.db_name;
    + public drop_table_with_environment_context_args(drop_table_with_environment_context_args other) {
    + __isset_bitfield = other.__isset_bitfield;
    + if (other.isSetDbname()) {
    + this.dbname = other.dbname;
            }
    - if (other.isSetPattern()) {
    - this.pattern = other.pattern;
    + if (other.isSetName()) {
    + this.name = other.name;
    + }
    + this.deleteData = other.deleteData;
    + if (other.isSetEnvironment_context()) {
    + this.environment_context = new EnvironmentContext(other.environment_context);
            }
          }

    - public get_tables_args deepCopy() {
    - return new get_tables_args(this);
    + public drop_table_with_environment_context_args deepCopy() {
    + return new drop_table_with_environment_context_args(this);
          }

          @Override
          public void clear() {
    - this.db_name = null;
    - this.pattern = null;
    + this.dbname = null;
    + this.name = null;
    + setDeleteDataIsSet(false);
    + this.deleteData = false;
    + this.environment_context = null;
          }

    - public String getDb_name() {
    - return this.db_name;
    + public String getDbname() {
    + return this.dbname;
          }

    - public void setDb_name(String db_name) {
    - this.db_name = db_name;
    + public void setDbname(String dbname) {
    + this.dbname = dbname;
          }

    - public void unsetDb_name() {
    - this.db_name = null;
    + public void unsetDbname() {
    + this.dbname = null;
          }

    - /** Returns true if field db_name is set (has been assigned a value) and false otherwise */
    - public boolean isSetDb_name() {
    - return this.db_name != null;
    + /** Returns true if field dbname is set (has been assigned a value) and false otherwise */
    + public boolean isSetDbname() {
    + return this.dbname != null;
          }

    - public void setDb_nameIsSet(boolean value) {
    + public void setDbnameIsSet(boolean value) {
            if (!value) {
    - this.db_name = null;
    + this.dbname = null;
            }
          }

    - public String getPattern() {
    - return this.pattern;
    + public String getName() {
    + return this.name;
          }

    - public void setPattern(String pattern) {
    - this.pattern = pattern;
    + public void setName(String name) {
    + this.name = name;
          }

    - public void unsetPattern() {
    - this.pattern = null;
    + public void unsetName() {
    + this.name = null;
          }

    - /** Returns true if field pattern is set (has been assigned a value) and false otherwise */
    - public boolean isSetPattern() {
    - return this.pattern != null;
    + /** Returns true if field name is set (has been assigned a value) and false otherwise */
    + public boolean isSetName() {
    + return this.name != null;
          }

    - public void setPatternIsSet(boolean value) {
    + public void setNameIsSet(boolean value) {
            if (!value) {
    - this.pattern = null;
    + this.name = null;
    + }
    + }
    +
    + public boolean isDeleteData() {
    + return this.deleteData;
    + }
    +
    + public void setDeleteData(boolean deleteData) {
    + this.deleteData = deleteData;
    + setDeleteDataIsSet(true);
    + }
    +
    + public void unsetDeleteData() {
    + __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DELETEDATA_ISSET_ID);
    + }
    +
    + /** Returns true if field deleteData is set (has been assigned a value) and false otherwise */
    + public boolean isSetDeleteData() {
    + return EncodingUtils.testBit(__isset_bitfield, __DELETEDATA_ISSET_ID);
    + }
    +
    + public void setDeleteDataIsSet(boolean value) {
    + __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DELETEDATA_ISSET_ID, value);
    + }
    +
    + public EnvironmentContext getEnvironment_context() {
    + return this.environment_context;
    + }
    +
    + public void setEnvironment_context(EnvironmentContext environment_context) {
    + this.environment_context = environment_context;
    + }
    +
    + public void unsetEnvironment_context() {
    + this.environment_context = null;
    + }
    +
    + /** Returns true if field environment_context is set (has been assigned a value) and false otherwise */
    + public boolean isSetEnvironment_context() {
    + return this.environment_context != null;
    + }
    +
    + public void setEnvironment_contextIsSet(boolean value) {
    + if (!value) {
    + this.environment_context = null;
            }
          }

          public void setFieldValue(_Fields field, Object value) {
            switch (field) {
    - case DB_NAME:
    + case DBNAME:
              if (value == null) {
    - unsetDb_name();
    + unsetDbname();
              } else {
    - setDb_name((String)value);
    + setDbname((String)value);
              }
              break;

    - case PATTERN:
    + case NAME:
              if (value == null) {
    - unsetPattern();
    + unsetName();
              } else {
    - setPattern((String)value);
    + setName((String)value);
    + }
    + break;
    +
    + case DELETE_DATA:
    + if (value == null) {
    + unsetDeleteData();
    + } else {
    + setDeleteData((Boolean)value);
    + }
    + break;
    +
    + case ENVIRONMENT_CONTEXT:
    + if (value == null) {
    + unsetEnvironment_context();
    + } else {
    + setEnvironment_context((EnvironmentContext)value);
              }
              break;

    @@ -22640,11 +23267,17 @@ public class ThriftHiveMetastore {

          public Object getFieldValue(_Fields field) {
            switch (field) {
    - case DB_NAME:
    - return getDb_name();
    + case DBNAME:
    + return getDbname();

    - case PATTERN:
    - return getPattern();
    + case NAME:
    + return getName();
    +
    + case DELETE_DATA:
    + return Boolean.valueOf(isDeleteData());
    +
    + case ENVIRONMENT_CONTEXT:
    + return getEnvironment_context();

            }
            throw new IllegalStateException();
    @@ -22657,10 +23290,14 @@ public class ThriftHiveMetastore {
            }

            switch (field) {
    - case DB_NAME:
    - return isSetDb_name();
    - case PATTERN:
    - return isSetPattern();
    + case DBNAME:
    + return isSetDbname();
    + case NAME:
    + return isSetName();
    + case DELETE_DATA:
    + return isSetDeleteData();
    + case ENVIRONMENT_CONTEXT:
    + return isSetEnvironment_context();
            }
            throw new IllegalStateException();
          }
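
Because deleteData is a Java primitive, the generated args class cannot signal "unset" with null; it tracks set state in __isset_bitfield, which is what the DELETE_DATA branches above consult, while the object-typed fields keep using null checks. A short sketch against the generated class (assuming the org.apache.hadoop.hive.metastore.api classes are on the classpath) makes the difference visible.

    // Sketch of the isset bookkeeping in drop_table_with_environment_context_args;
    // uses only accessors defined in the generated class shown in this diff.
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore.drop_table_with_environment_context_args;

    public class ArgsIssetSketch {
      public static void main(String[] args) {
        drop_table_with_environment_context_args a =
            new drop_table_with_environment_context_args();

        // Object fields report "set" via null checks...
        System.out.println(a.isSetDbname());               // false
        a.setDbname("default");
        System.out.println(a.isSetDbname());               // true

        // ...while the primitive boolean relies on a bit in __isset_bitfield,
        // flipped by setDeleteData()/setDeleteDataIsSet().
        System.out.println(a.isSetDeleteData());           // false
        a.setDeleteData(false);                            // value unchanged, but now "set"
        System.out.println(a.isSetDeleteData());           // true

        a.setEnvironment_context(new EnvironmentContext());
        System.out.println(a.isSetEnvironment_context());  // true
      }
    }
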
    @@ -22669,30 +23306,48 @@ public class ThriftHiveMetastore {
          public boolean equals(Object that) {
            if (that == null)
              return false;
    - if (that instanceof get_tables_args)
    - return this.equals((get_tables_args)that);
    + if (that instanceof drop_table_with_environment_context_args)
    + return this.equals((drop_table_with_environment_context_args)that);
            return false;
          }

    - public boolean equals(get_tables_args that) {
    + public boolean equals(drop_table_with_environment_context_args that) {
            if (that == null)
              return false;

    - boolean this_present_db_name = true && this.isSetDb_name();
    - boolean that_present_db_name = true && that.isSetDb_name();
    - if (this_present_db_name || that_present_db_name) {
    - if (!(this_present_db_name && that_present_db_name))
    + boolean this_present_dbname = true && this.isSetDbname();
    + boolean that_present_dbname = true && that.isSetDbname();
    + if (this_present_dbname || that_present_dbname) {
    + if (!(this_present_dbname && that_present_dbname))
                return false;
    - if (!this.db_name.equals(that.db_name))
    + if (!this.dbname.equals(that.dbname))
                return false;
            }

    - boolean this_present_pattern = true && this.isSetPattern();
    - boolean that_present_pattern = true && that.isSetPattern();
    - if (this_present_pattern || that_present_pattern) {
    - if (!(this_present_pattern && that_present_pattern))
    + boolean this_present_name = true && this.isSetName();
    + boolean that_present_name = true && that.isSetName();
    + if (this_present_name || that_present_name) {
    + if (!(this_present_name && that_present_name))
                return false;
    - if (!this.pattern.equals(that.pattern))
    + if (!this.name.equals(that.name))
    + return false;
    + }
    +
    + boolean this_present_deleteData = true;
    + boolean that_present_deleteData = true;
    + if (this_present_deleteData || that_present_deleteData) {
    + if (!(this_present_deleteData && that_present_deleteData))
    + return false;
    + if (this.deleteData != that.deleteData)
    + return false;
    + }
    +
    + boolean this_present_environment_context = true && this.isSetEnvironment_context();
    + boolean that_present_environment_context = true && that.isSetEnvironment_context();
    + if (this_present_environment_context || that_present_environment_context) {
    + if (!(this_present_environment_context && that_present_environment_context))
    + return false;
    + if (!this.environment_context.equals(that.environment_context))
                return false;
            }

    @@ -22703,43 +23358,73 @@ public class ThriftHiveMetastore {
          public int hashCode() {
            HashCodeBuilder builder = new HashCodeBuilder();

    - boolean present_db_name = true && (isSetDb_name());
    - builder.append(present_db_name);
    - if (present_db_name)
    - builder.append(db_name);
    + boolean present_dbname = true && (isSetDbname());
    + builder.append(present_dbname);
    + if (present_dbname)
    + builder.append(dbname);

    - boolean present_pattern = true && (isSetPattern());
    - builder.append(present_pattern);
    - if (present_pattern)
    - builder.append(pattern);
    + boolean present_name = true && (isSetName());
    + builder.append(present_name);
    + if (present_name)
    + builder.append(name);
    +
    + boolean present_deleteData = true;
    + builder.append(present_deleteData);
    + if (present_deleteData)
    + builder.append(deleteData);
    +
    + boolean present_environment_context = true && (isSetEnvironment_context());
    + builder.append(present_environment_context);
    + if (present_environment_context)
    + builder.append(environment_context);

            return builder.toHashCode();
          }

    - public int compareTo(get_tables_args other) {
    + public int compareTo(drop_table_with_environment_context_args other) {
            if (!getClass().equals(other.getClass())) {
              return getClass().getName().compareTo(other.getClass().getName());
            }

            int lastComparison = 0;
    - get_tables_args typedOther = (get_tables_args)other;
    + drop_table_with_environment_context_args typedOther = (drop_table_with_environment_context_args)other;

    - lastComparison = Boolean.valueOf(isSetDb_name()).compareTo(typedOther.isSetDb_name());
    + lastComparison = Boolean.valueOf(isSetDbname()).compareTo(typedOther.isSetDbname());
            if (lastComparison != 0) {
              return lastComparison;
            }
    - if (isSetDb_name()) {
    - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.db_name, typedOther.db_name);
    + if (isSetDbname()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbname, typedOther.dbname);
              if (lastComparison != 0) {
                return lastComparison;
              }
            }
    - lastComparison = Boolean.valueOf(isSetPattern()).compareTo(typedOther.isSetPattern());
    + lastComparison = Boolean.valueOf(isSetName()).compareTo(typedOther.isSetName());
            if (lastComparison != 0) {
              return lastComparison;
            }
    - if (isSetPattern()) {
    - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pattern, typedOther.pattern);
    + if (isSetName()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, typedOther.name);
    + if (lastComparison != 0) {
    + return lastComparison;
    + }
    + }
    + lastComparison = Boolean.valueOf(isSetDeleteData()).compareTo(typedOther.isSetDeleteData());
    + if (lastComparison != 0) {
    + return lastComparison;
    + }
    + if (isSetDeleteData()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.deleteData, typedOther.deleteData);
    + if (lastComparison != 0) {
    + return lastComparison;
    + }
    + }
    + lastComparison = Boolean.valueOf(isSetEnvironment_context()).compareTo(typedOther.isSetEnvironment_context());
    + if (lastComparison != 0) {
    + return lastComparison;
    + }
    + if (isSetEnvironment_context()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.environment_context, typedOther.environment_context);
              if (lastComparison != 0) {
                return lastComparison;
              }
    @@ -22761,22 +23446,34 @@ public class ThriftHiveMetastore {

          @Override
          public String toString() {
    - StringBuilder sb = new StringBuilder("get_tables_args(");
    + StringBuilder sb = new StringBuilder("drop_table_with_environment_context_args(");
            boolean first = true;

    - sb.append("db_name:");
    - if (this.db_name == null) {
    + sb.append("dbname:");
    + if (this.dbname == null) {
              sb.append("null");
            } else {
    - sb.append(this.db_name);
    + sb.append(this.dbname);
            }
            first = false;
            if (!first) sb.append(", ");
    - sb.append("pattern:");
    - if (this.pattern == null) {
    + sb.append("name:");
    + if (this.name == null) {
              sb.append("null");
            } else {
    - sb.append(this.pattern);
    + sb.append(this.name);
    + }
    + first = false;
    + if (!first) sb.append(", ");
    + sb.append("deleteData:");
    + sb.append(this.deleteData);
    + first = false;
    + if (!first) sb.append(", ");
    + sb.append("environment_context:");
    + if (this.environment_context == null) {
    + sb.append("null");
    + } else {
    + sb.append(this.environment_context);
            }
            first = false;
            sb.append(")");
    @@ -22786,6 +23483,9 @@ public class ThriftHiveMetastore {
          public void validate() throws org.apache.thrift.TException {
            // check for required fields
            // check for sub-struct validity
    + if (environment_context != null) {
    + environment_context.validate();
    + }
          }

          private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    @@ -22798,21 +23498,23 @@ public class ThriftHiveMetastore {

          private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
            try {
    + // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    + __isset_bitfield = 0;
              read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
            } catch (org.apache.thrift.TException te) {
              throw new java.io.IOException(te);
            }
          }

    - private static class get_tables_argsStandardSchemeFactory implements SchemeFactory {
    - public get_tables_argsStandardScheme getScheme() {
    - return new get_tables_argsStandardScheme();
    + private static class drop_table_with_environment_context_argsStandardSchemeFactory implements SchemeFactory {
    + public drop_table_with_environment_context_argsStandardScheme getScheme() {
    + return new drop_table_with_environment_context_argsStandardScheme();
            }
          }

    - private static class get_tables_argsStandardScheme extends StandardScheme<get_tables_args> {
    + private static class drop_table_with_environment_context_argsStandardScheme extends StandardScheme<drop_table_with_environment_context_args> {

    - public void read(org.apache.thrift.protocol.TProtocol iprot, get_tables_args struct) throws org.apache.thrift.TException {
    + public void read(org.apache.thrift.protocol.TProtocol iprot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
              org.apache.thrift.protocol.TField schemeField;
              iprot.readStructBegin();
              while (true)
    @@ -22822,18 +23524,35 @@ public class ThriftHiveMetastore {
                  break;
                }
                switch (schemeField.id) {
    - case 1: // DB_NAME
    + case 1: // DBNAME
                    if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
    - struct.db_name = iprot.readString();
    - struct.setDb_nameIsSet(true);
    + struct.dbname = iprot.readString();
    + struct.setDbnameIsSet(true);
                    } else {
                      org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                    }
                    break;
    - case 2: // PATTERN
    + case 2: // NAME
                    if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
    - struct.pattern = iprot.readString();
    - struct.setPatternIsSet(true);
    + struct.name = iprot.readString();
    + struct.setNameIsSet(true);
    + } else {
    + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
    + }
    + break;
    + case 3: // DELETE_DATA
    + if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
    + struct.deleteData = iprot.readBool();
    + struct.setDeleteDataIsSet(true);
    + } else {
    + org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
    + }
    + break;
    + case 4: // ENVIRONMENT_CONTEXT
    + if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
    + struct.environment_context = new EnvironmentContext();
    + struct.environment_context.read(iprot);
    + struct.setEnvironment_contextIsSet(true);
                    } else {
                      org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                    }
    @@ -22847,18 +23566,26 @@ public class ThriftHiveMetastore {
              struct.validate();
            }

    - public void write(org.apache.thrift.protocol.TProtocol oprot, get_tables_args struct) throws org.apache.thrift.TException {
    + public void write(org.apache.thrift.protocol.TProtocol oprot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
              struct.validate();

              oprot.writeStructBegin(STRUCT_DESC);
    - if (struct.db_name != null) {
    - oprot.writeFieldBegin(DB_NAME_FIELD_DESC);
    - oprot.writeString(struct.db_name);
    + if (struct.dbname != null) {
    + oprot.writeFieldBegin(DBNAME_FIELD_DESC);
    + oprot.writeString(struct.dbname);
                oprot.writeFieldEnd();
              }
    - if (struct.pattern != null) {
    - oprot.writeFieldBegin(PATTERN_FIELD_DESC);
    - oprot.writeString(struct.pattern);
    + if (struct.name != null) {
    + oprot.writeFieldBegin(NAME_FIELD_DESC);
    + oprot.writeString(struct.name);
    + oprot.writeFieldEnd();
    + }
    + oprot.writeFieldBegin(DELETE_DATA_FIELD_DESC);
    + oprot.writeBool(struct.deleteData);
    + oprot.writeFieldEnd();
    + if (struct.environment_context != null) {
    + oprot.writeFieldBegin(ENVIRONMENT_CONTEXT_FIELD_DESC);
    + struct.environment_context.write(oprot);
                oprot.writeFieldEnd();
              }
              oprot.writeFieldStop();
    @@ -22867,69 +23594,90 @@ public class ThriftHiveMetastore {

          }

    - private static class get_tables_argsTupleSchemeFactory implements SchemeFactory {
    - public get_tables_argsTupleScheme getScheme() {
    - return new get_tables_argsTupleScheme();
    + private static class drop_table_with_environment_context_argsTupleSchemeFactory implements SchemeFactory {
    + public drop_table_with_environment_context_argsTupleScheme getScheme() {
    + return new drop_table_with_environment_context_argsTupleScheme();
            }
          }

    - private static class get_tables_argsTupleScheme extends TupleScheme<get_tables_args> {
    + private static class drop_table_with_environment_context_argsTupleScheme extends TupleScheme<drop_table_with_environment_context_args> {

            @Override
    - public void write(org.apache.thrift.protocol.TProtocol prot, get_tables_args struct) throws org.apache.thrift.TException {
    + public void write(org.apache.thrift.protocol.TProtocol prot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
              TTupleProtocol oprot = (TTupleProtocol) prot;
              BitSet optionals = new BitSet();
    - if (struct.isSetDb_name()) {
    + if (struct.isSetDbname()) {
                optionals.set(0);
              }
    - if (struct.isSetPattern()) {
    + if (struct.isSetName()) {
                optionals.set(1);
              }
    - oprot.writeBitSet(optionals, 2);
    - if (struct.isSetDb_name()) {
    - oprot.writeString(struct.db_name);
    + if (struct.isSetDeleteData()) {
    + optionals.set(2);
              }
    - if (struct.isSetPattern()) {
    - oprot.writeString(struct.pattern);
    + if (struct.isSetEnvironment_context()) {
    + optionals.set(3);
    + }
    + oprot.writeBitSet(optionals, 4);
    + if (struct.isSetDbname()) {
    + oprot.writeString(struct.dbname);
    + }
    + if (struct.isSetName()) {
    + oprot.writeString(struct.name);
    + }
    + if (struct.isSetDeleteData()) {
    + oprot.writeBool(struct.deleteData);
    + }
    + if (struct.isSetEnvironment_context()) {
    + struct.environment_context.write(oprot);
              }
            }

            @Override
    - public void read(org.apache.thrift.protocol.TProtocol prot, get_tables_args struct) throws org.apache.thrift.TException {
    + public void read(org.apache.thrift.protocol.TProtocol prot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
              TTupleProtocol iprot = (TTupleProtocol) prot;
    - BitSet incoming = iprot.readBitSet(2);
    + BitSet incoming = iprot.readBitSet(4);
              if (incoming.get(0)) {
    - struct.db_name = iprot.readString();
    - struct.setDb_nameIsSet(true);
    + struct.dbname = iprot.readString();
    + struct.setDbnameIsSet(true);
              }
              if (incoming.get(1)) {
    - struct.pattern = iprot.readString();
    - struct.setPatternIsSet(true);
    + struct.name = iprot.readString();
    + struct.setNameIsSet(true);
    + }
    + if (incoming.get(2)) {
    + struct.deleteData = iprot.readBool();
    + struct.setDeleteDataIsSet(true);
    + }
    + if (incoming.get(3)) {
    + struct.environment_context = new EnvironmentContext();
    + struct.environment_context.read(iprot);
    + struct.setEnvironment_contextIsSet(true);
              }
            }
          }

        }

    - public static class get_tables_result implements org.apache.thrift.TBase<get_tables_result, get_tables_result._Fields>, java.io.Serializable, Cloneable {
    - private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_tables_result");
    + public static class drop_table_with_environment_context_result implements org.apache.thrift.TBase<drop_table_with_environment_context_result, drop_table_with_environment_context_result._Fields>, java.io.Serializable, Cloneable {
    + private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("drop_table_with_environment_context_result");

    - private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
          private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1);
    + private static final org.apache.thrift.protocol.TField O3_FIELD_DESC = new org.apache.thrift.protocol.TField("o3", org.apache.thrift.protocol.TType.STRUCT, (short)2);

          private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
          static {
    - schemes.put(StandardScheme.class, new get_tables_resultStandardSchemeFactory());
    - schemes.put(TupleScheme.class, new get_tables_resultTupleSchemeFactory());
    + schemes.put(StandardScheme.class, new drop_table_with_environment_context_resultStandardSchemeFactory());
    + schemes.put(TupleScheme.class, new drop_table_with_environment_context_resultTupleSchemeFactory());
          }

    - private List<String> success; // required
    - private MetaException o1; // required
    + private NoSuchObjectException o1; // required
    + private MetaException o3; // required

          /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
          public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    - SUCCESS((short)0, "success"),
    - O1((short)1, "o1");
    + O1((short)1, "o1"),
    + O3((short)2, "o3");

            private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    @@ -22944,10 +23692,10 @@ public class ThriftHiveMetastore {
             */
            public static _Fields findByThriftId(int fieldId) {
              switch(fieldId) {
    - case 0: // SUCCESS
    - return SUCCESS;
                case 1: // O1
                  return O1;
    + case 2: // O3
    + return O3;
                default:
                  return null;
              }
    @@ -22991,129 +23739,109 @@ public class ThriftHiveMetastore {
          public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
          static {
            Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    - tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
    - new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
    - new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
            tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT,
                new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    + tmpMap.put(_Fields.O3, new org.apache.thrift.meta_data.FieldMetaData("o3", org.apache.thrift.TFieldRequirementType.DEFAULT,
    + new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
            metaDataMap = Collections.unmodifiableMap(tmpMap);
    - org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_tables_result.class, metaDataMap);
    + org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(drop_table_with_environment_context_result.class, metaDataMap);
          }

    - public get_tables_result() {
    + public drop_table_with_environment_context_result() {
          }

    - public get_tables_result(
    - List<String> success,
    - MetaException o1)
    + public drop_table_with_environment_context_result(
    + NoSuchObjectException o1,
    + MetaException o3)
          {
            this();
    - this.success = success;
            this.o1 = o1;
    + this.o3 = o3;
          }

          /**
           * Performs a deep copy on <i>other</i>.
           */
    - public get_tables_result(get_tables_result other) {
    - if (other.isSetSuccess()) {
    - List<String> __this__success = new ArrayList<String>();
    - for (String other_element : other.success) {
    - __this__success.add(other_element);
    - }
    - this.success = __this__success;
    - }
    + public drop_table_with_environment_context_result(drop_table_with_environment_context_result other) {
            if (other.isSetO1()) {
    - this.o1 = new MetaException(other.o1);
    + this.o1 = new NoSuchObjectException(other.o1);
    + }
    + if (other.isSetO3()) {
    + this.o3 = new MetaException(other.o3);
            }
          }

    - public get_tables_result deepCopy() {
    - return new get_tables_result(this);
    + public drop_table_with_environment_context_result deepCopy() {
    + return new drop_table_with_environment_context_result(this);
          }

          @Override
          public void clear() {
    - this.success = null;
            this.o1 = null;
    + this.o3 = null;
          }

    - public int getSuccessSize() {
    - return (this.success == null) ? 0 : this.success.size();
    - }
    -
    - public java.util.Iterator<String> getSuccessIterator() {
    - return (this.success == null) ? null : this.success.iterator();
    - }
    -
    - public void addToSuccess(String elem) {
    - if (this.success == null) {
    - this.success = new ArrayList<String>();
    - }
    - this.success.add(elem);
    - }
    -
    - public List<String> getSuccess() {
    - return this.success;
    + public NoSuchObjectException getO1() {
    + return this.o1;
          }

    - public void setSuccess(List<String> success) {
    - this.success = success;
    + public void setO1(NoSuchObjectException o1) {
    + this.o1 = o1;
          }

    - public void unsetSuccess() {
    - this.success = null;
    + public void unsetO1() {
    + this.o1 = null;
          }

    - /** Returns true if field success is set (has been assigned a value) and false otherwise */
    - public boolean isSetSuccess() {
    - return this.success != null;
    + /** Returns true if field o1 is set (has been assigned a value) and false otherwise */
    + public boolean isSetO1() {
    + return this.o1 != null;
          }

    - public void setSuccessIsSet(boolean value) {
    + public void setO1IsSet(boolean value) {
            if (!value) {
    - this.success = null;
    + this.o1 = null;
            }
          }

    - public MetaException getO1() {
    - return this.o1;
    + public MetaException getO3() {
    + return this.o3;
          }

    - public void setO1(MetaException o1) {
    - this.o1 = o1;
    + public void setO3(MetaException o3) {
    + this.o3 = o3;
          }

    - public void unsetO1() {
    - this.o1 = null;
    + public void unsetO3() {
    + this.o3 = null;
          }

    - /** Returns true if field o1 is set (has been assigned a value) and false otherwise */
    - public boolean isSetO1() {
    - return this.o1 != null;
    + /** Returns true if field o3 is set (has been assigned a value) and false otherwise */
    + public boolean isSetO3() {
    + return this.o3 != null;
          }

    - public void setO1IsSet(boolean value) {
    + public void setO3IsSet(boolean value) {
            if (!value) {
    - this.o1 = null;
    + this.o3 = null;
            }
          }

          public void setFieldValue(_Fields field, Object value) {
            switch (field) {
    - case SUCCESS:
    + case O1:
              if (value == null) {
    - unsetSuccess();
    + unsetO1();
              } else {
    - setSuccess((List<String>)value);
    + setO1((NoSuchObjectException)value);
              }
              break;

    - case O1:
    + case O3:
              if (value == null) {
    - unsetO1();
    + unsetO3();
              } else {
    - setO1((MetaException)value);
    + setO3((MetaException)value);
              }
              break;

    @@ -23122,12 +23850,12 @@ public class ThriftHiveMetastore {

          public Object getFieldValue(_Fields field) {
            switch (field) {
    - case SUCCESS:
    - return getSuccess();
    -
            case O1:
              return getO1();

    + case O3:
    + return getO3();
    +
            }
            throw new IllegalStateException();
          }
    @@ -23139,10 +23867,10 @@ public class ThriftHiveMetastore {
            }

            switch (field) {
    - case SUCCESS:
    - return isSetSuccess();
            case O1:
              return isSetO1();
    + case O3:
    + return isSetO3();
            }
            throw new IllegalStateException();
          }
    @@ -23151,24 +23879,15 @@ public class ThriftHiveMetastore {
          public boolean equals(Object that) {
            if (that == null)
              return false;
    - if (that instanceof get_tables_result)
    - return this.equals((get_tables_result)that);
    + if (that instanceof drop_table_with_environment_context_result)
    + return this.equals((drop_table_with_environment_context_result)that);
            return false;
          }

    - public boolean equals(get_tables_result that) {
    + public boolean equals(drop_table_with_environment_context_result that) {
            if (that == null)
              return false;

    - boolean this_present_success = true && this.isSetSuccess();
    - boolean that_present_success = true && that.isSetSuccess();
    - if (this_present_success || that_present_success) {
    - if (!(this_present_success && that_present_success))
    - return false;
    - if (!this.success.equals(that.success))
    - return false;
    - }
    -
            boolean this_present_o1 = true && this.isSetO1();
            boolean that_present_o1 = true && that.isSetO1();
            if (this_present_o1 || that_present_o1) {
    @@ -23178,6 +23897,15 @@ public class ThriftHiveMetastore {
                return false;
            }

    + boolean this_present_o3 = true && this.isSetO3();
    + boolean that_present_o3 = true && that.isSetO3();
    + if (this_present_o3 || that_present_o3) {
    + if (!(this_present_o3 && that_present_o3))
    + return false;
    + if (!this.o3.equals(that.o3))
    + return false;
    + }
    +
            return true;
          }

    @@ -23185,43 +23913,43 @@ public class ThriftHiveMetastore {
          public int hashCode() {
            HashCodeBuilder builder = new HashCodeBuilder();

    - boolean present_success = true && (isSetSuccess());
    - builder.append(present_success);
    - if (present_success)
    - builder.append(success);
    -
            boolean present_o1 = true && (isSetO1());
            builder.append(present_o1);
            if (present_o1)
              builder.append(o1);

    + boolean present_o3 = true && (isSetO3());
    + builder.append(present_o3);
    + if (present_o3)
    + builder.append(o3);
    +
            return builder.toHashCode();
          }

    - public int compareTo(get_tables_result other) {
    + public int compareTo(drop_table_with_environment_context_result other) {
            if (!getClass().equals(other.getClass())) {
              return getClass().getName().compareTo(other.getClass().getName());
            }

            int lastComparison = 0;
    - get_tables_result typedOther = (get_tables_result)other;
    + drop_table_with_environment_context_result typedOther = (drop_table_with_environment_context_result)other;

    - lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(typedOther.isSetSuccess());
    + lastComparison = Boolean.valueOf(isSetO1()).compareTo(typedOther.isSetO1());
            if (lastComparison != 0) {
              return lastComparison;
            }
    - if (isSetSuccess()) {
    - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
    + if (isSetO1()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o1, typedOther.o1);
              if (lastComparison != 0) {
                return lastComparison;
              }
            }
    - lastComparison = Boolean.valueOf(isSetO1()).compareTo(typedOther.isSetO1());
    + lastComparison = Boolean.valueOf(isSetO3()).compareTo(typedOther.isSetO3());
            if (lastComparison != 0) {
              return lastComparison;
            }
    - if (isSetO1()) {
    - lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o1, typedOther.o1);
    + if (isSetO3()) {
    + lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o3, typedOther.o3);
              if (lastComparison != 0) {
                return lastComparison;
              }
    @@ -23243,22 +23971,22 @@ public class ThriftHiveMetastore {

          @Override
          public String toString() {
    - StringBuilder sb = new StringBuilder("get_tables_result(");
    + StringBuilder sb = new StringBuilder("drop_table_with_environment_context_result(");
            boolean first = true;

    - sb.append("success:");
    - if (this.success == null) {
    + sb.append("o1:");
    + if (this.o1 == null) {
              sb.append("null");
            } else {
    - sb.append(this.success);
    + sb.append(this.o1);
            }
            first = false;
            if (!first) sb.append(", ");
    - sb.append("o1:");
    - if (this.o1 == null) {
    + sb.append("o3:");
    + if (this.o3 == null) {
              sb.append("null");
            } else {
    - sb.append(this.o1);
    + sb.append(this.o3);
            }
            first = false;
            sb.append(")");
    @@ -23286,15 +24014,15 @@ public class ThriftHiveMetastore {
            }
          }

    - private static class get_tables_resultStandardSchemeFactory implements SchemeFactory {
    - public get_tables_resultStandardScheme getScheme() {
    - return new get_tables_resultStandardScheme();

    [... 19368 lines stripped ...]
    Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore-remote Tue Feb 12 18:52:55 2013
    @@ -38,6 +38,7 @@ if len(sys.argv) <= 1 or sys.argv[1] ==
        print ' void create_table(Table tbl)'
        print ' void create_table_with_environment_context(Table tbl, EnvironmentContext environment_context)'
        print ' void drop_table(string dbname, string name, bool deleteData)'
    + print ' void drop_table_with_environment_context(string dbname, string name, bool deleteData, EnvironmentContext environment_context)'
        print ' get_tables(string db_name, string pattern)'
        print ' get_all_tables(string db_name)'
        print ' Table get_table(string dbname, string tbl_name)'
    @@ -49,9 +50,13 @@ if len(sys.argv) <= 1 or sys.argv[1] ==
        print ' Partition add_partition_with_environment_context(Partition new_part, EnvironmentContext environment_context)'
        print ' i32 add_partitions( new_parts)'
        print ' Partition append_partition(string db_name, string tbl_name, part_vals)'
    + print ' Partition append_partition_with_environment_context(string db_name, string tbl_name, part_vals, EnvironmentContext environment_context)'
        print ' Partition append_partition_by_name(string db_name, string tbl_name, string part_name)'
    + print ' Partition append_partition_by_name_with_environment_context(string db_name, string tbl_name, string part_name, EnvironmentContext environment_context)'
        print ' bool drop_partition(string db_name, string tbl_name, part_vals, bool deleteData)'
    + print ' bool drop_partition_with_environment_context(string db_name, string tbl_name, part_vals, bool deleteData, EnvironmentContext environment_context)'
        print ' bool drop_partition_by_name(string db_name, string tbl_name, string part_name, bool deleteData)'
    + print ' bool drop_partition_by_name_with_environment_context(string db_name, string tbl_name, string part_name, bool deleteData, EnvironmentContext environment_context)'
        print ' Partition get_partition(string db_name, string tbl_name, part_vals)'
        print ' Partition get_partition_with_auth(string db_name, string tbl_name, part_vals, string user_name, group_names)'
        print ' Partition get_partition_by_name(string db_name, string tbl_name, string part_name)'
    @@ -239,6 +244,12 @@ elif cmd == 'drop_table':
          sys.exit(1)
        pp.pprint(client.drop_table(args[0],args[1],eval(args[2]),))

    +elif cmd == 'drop_table_with_environment_context':
    + if len(args) != 4:
    + print 'drop_table_with_environment_context requires 4 args'
    + sys.exit(1)
    + pp.pprint(client.drop_table_with_environment_context(args[0],args[1],eval(args[2]),eval(args[3]),))
    +
      elif cmd == 'get_tables':
        if len(args) != 2:
          print 'get_tables requires 2 args'
    @@ -305,24 +316,48 @@ elif cmd == 'append_partition':
          sys.exit(1)
        pp.pprint(client.append_partition(args[0],args[1],eval(args[2]),))

    +elif cmd == 'append_partition_with_environment_context':
    + if len(args) != 4:
    + print 'append_partition_with_environment_context requires 4 args'
    + sys.exit(1)
    + pp.pprint(client.append_partition_with_environment_context(args[0],args[1],eval(args[2]),eval(args[3]),))
    +
      elif cmd == 'append_partition_by_name':
        if len(args) != 3:
          print 'append_partition_by_name requires 3 args'
          sys.exit(1)
        pp.pprint(client.append_partition_by_name(args[0],args[1],args[2],))

    +elif cmd == 'append_partition_by_name_with_environment_context':
    + if len(args) != 4:
    + print 'append_partition_by_name_with_environment_context requires 4 args'
    + sys.exit(1)
    + pp.pprint(client.append_partition_by_name_with_environment_context(args[0],args[1],args[2],eval(args[3]),))
    +
      elif cmd == 'drop_partition':
        if len(args) != 4:
          print 'drop_partition requires 4 args'
          sys.exit(1)
        pp.pprint(client.drop_partition(args[0],args[1],eval(args[2]),eval(args[3]),))

    +elif cmd == 'drop_partition_with_environment_context':
    + if len(args) != 5:
    + print 'drop_partition_with_environment_context requires 5 args'
    + sys.exit(1)
    + pp.pprint(client.drop_partition_with_environment_context(args[0],args[1],eval(args[2]),eval(args[3]),eval(args[4]),))
    +
      elif cmd == 'drop_partition_by_name':
        if len(args) != 4:
          print 'drop_partition_by_name requires 4 args'
          sys.exit(1)
        pp.pprint(client.drop_partition_by_name(args[0],args[1],args[2],eval(args[3]),))

    +elif cmd == 'drop_partition_by_name_with_environment_context':
    + if len(args) != 5:
    + print 'drop_partition_by_name_with_environment_context requires 5 args'
    + sys.exit(1)
    + pp.pprint(client.drop_partition_by_name_with_environment_context(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
    +
      elif cmd == 'get_partition':
        if len(args) != 3:
          print 'get_partition requires 3 args'
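    A note on the new commands above: the -remote wrapper eval()s every non-string argument (see the drop_table_with_environment_context branch), so the boolean and the environment context are supplied on the command line as Python expressions. A rough, hypothetical illustration of that argument handling, not part of the commit, with made-up argument values:

        # Illustration only (not part of this commit) of how the wrapper's
        # eval() of non-string arguments is expected to behave; the argv
        # values and the property key below are made up for the example.
        from hive_metastore.ttypes import EnvironmentContext

        argv = ['default', 'tmp_table', 'True',
                "EnvironmentContext(properties={'hive.example.flag': 'true'})"]
        deleteData = eval(argv[2])           # -> True (a Python bool literal)
        environment_context = eval(argv[3])  # -> an EnvironmentContext instance
        print argv[0], argv[1], deleteData, environment_context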
    Modified: hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py Tue Feb 12 18:52:55 2013
    @@ -131,6 +131,16 @@ class Iface(fb303.FacebookService.Iface)
          """
          pass

    + def drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
    + """
    + Parameters:
    + - dbname
    + - name
    + - deleteData
    + - environment_context
    + """
    + pass
    +
        def get_tables(self, db_name, pattern):
          """
          Parameters:
    @@ -221,6 +231,16 @@ class Iface(fb303.FacebookService.Iface)
          """
          pass

    + def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_vals
    + - environment_context
    + """
    + pass
    +
        def append_partition_by_name(self, db_name, tbl_name, part_name):
          """
          Parameters:
    @@ -230,6 +250,16 @@ class Iface(fb303.FacebookService.Iface)
          """
          pass

    + def append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_name
    + - environment_context
    + """
    + pass
    +
        def drop_partition(self, db_name, tbl_name, part_vals, deleteData):
          """
          Parameters:
    @@ -240,6 +270,17 @@ class Iface(fb303.FacebookService.Iface)
          """
          pass

    + def drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_vals
    + - deleteData
    + - environment_context
    + """
    + pass
    +
        def drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
          """
          Parameters:
    @@ -250,6 +291,17 @@ class Iface(fb303.FacebookService.Iface)
          """
          pass

    + def drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_name
    + - deleteData
    + - environment_context
    + """
    + pass
    +
        def get_partition(self, db_name, tbl_name, part_vals):
          """
          Parameters:
    @@ -1183,6 +1235,44 @@ class Client(fb303.FacebookService.Clien
            raise result.o3
          return

    + def drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
    + """
    + Parameters:
    + - dbname
    + - name
    + - deleteData
    + - environment_context
    + """
    + self.send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
    + self.recv_drop_table_with_environment_context()
    +
    + def send_drop_table_with_environment_context(self, dbname, name, deleteData, environment_context):
    + self._oprot.writeMessageBegin('drop_table_with_environment_context', TMessageType.CALL, self._seqid)
    + args = drop_table_with_environment_context_args()
    + args.dbname = dbname
    + args.name = name
    + args.deleteData = deleteData
    + args.environment_context = environment_context
    + args.write(self._oprot)
    + self._oprot.writeMessageEnd()
    + self._oprot.trans.flush()
    +
    + def recv_drop_table_with_environment_context(self, ):
    + (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    + if mtype == TMessageType.EXCEPTION:
    + x = TApplicationException()
    + x.read(self._iprot)
    + self._iprot.readMessageEnd()
    + raise x
    + result = drop_table_with_environment_context_result()
    + result.read(self._iprot)
    + self._iprot.readMessageEnd()
    + if result.o1 is not None:
    + raise result.o1
    + if result.o3 is not None:
    + raise result.o3
    + return
    +
        def get_tables(self, db_name, pattern):
          """
          Parameters:
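    For context, a minimal usage sketch of the generated client call added above. This is not part of the commit: the metastore host/port, the table names, and the example property key are assumptions, and EnvironmentContext is assumed to expose its map<string,string> 'properties' field as defined in the metastore IDL.

        # Minimal sketch (not part of this commit) of calling the new
        # generated Python client method; host/port, names, and the property
        # key are illustrative assumptions.
        from thrift.transport import TSocket, TTransport
        from thrift.protocol import TBinaryProtocol
        from hive_metastore import ThriftHiveMetastore
        from hive_metastore.ttypes import EnvironmentContext

        transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9083))
        client = ThriftHiveMetastore.Client(TBinaryProtocol.TBinaryProtocol(transport))
        transport.open()
        try:
            # Extra context is passed along with the drop call as a fourth argument.
            ctx = EnvironmentContext(properties={'hive.example.flag': 'true'})
            client.drop_table_with_environment_context('default', 'tmp_table', True, ctx)
        finally:
            transport.close()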
    @@ -1587,6 +1677,48 @@ class Client(fb303.FacebookService.Clien
            raise result.o3
          raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition failed: unknown result");

    + def append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_vals
    + - environment_context
    + """
    + self.send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    + return self.recv_append_partition_with_environment_context()
    +
    + def send_append_partition_with_environment_context(self, db_name, tbl_name, part_vals, environment_context):
    + self._oprot.writeMessageBegin('append_partition_with_environment_context', TMessageType.CALL, self._seqid)
    + args = append_partition_with_environment_context_args()
    + args.db_name = db_name
    + args.tbl_name = tbl_name
    + args.part_vals = part_vals
    + args.environment_context = environment_context
    + args.write(self._oprot)
    + self._oprot.writeMessageEnd()
    + self._oprot.trans.flush()
    +
    + def recv_append_partition_with_environment_context(self, ):
    + (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    + if mtype == TMessageType.EXCEPTION:
    + x = TApplicationException()
    + x.read(self._iprot)
    + self._iprot.readMessageEnd()
    + raise x
    + result = append_partition_with_environment_context_result()
    + result.read(self._iprot)
    + self._iprot.readMessageEnd()
    + if result.success is not None:
    + return result.success
    + if result.o1 is not None:
    + raise result.o1
    + if result.o2 is not None:
    + raise result.o2
    + if result.o3 is not None:
    + raise result.o3
    + raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result");
    +
        def append_partition_by_name(self, db_name, tbl_name, part_name):
          """
          Parameters:
    @@ -1627,6 +1759,48 @@ class Client(fb303.FacebookService.Clien
            raise result.o3
          raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result");

    + def append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_name
    + - environment_context
    + """
    + self.send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
    + return self.recv_append_partition_by_name_with_environment_context()
    +
    + def send_append_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, environment_context):
    + self._oprot.writeMessageBegin('append_partition_by_name_with_environment_context', TMessageType.CALL, self._seqid)
    + args = append_partition_by_name_with_environment_context_args()
    + args.db_name = db_name
    + args.tbl_name = tbl_name
    + args.part_name = part_name
    + args.environment_context = environment_context
    + args.write(self._oprot)
    + self._oprot.writeMessageEnd()
    + self._oprot.trans.flush()
    +
    + def recv_append_partition_by_name_with_environment_context(self, ):
    + (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    + if mtype == TMessageType.EXCEPTION:
    + x = TApplicationException()
    + x.read(self._iprot)
    + self._iprot.readMessageEnd()
    + raise x
    + result = append_partition_by_name_with_environment_context_result()
    + result.read(self._iprot)
    + self._iprot.readMessageEnd()
    + if result.success is not None:
    + return result.success
    + if result.o1 is not None:
    + raise result.o1
    + if result.o2 is not None:
    + raise result.o2
    + if result.o3 is not None:
    + raise result.o3
    + raise TApplicationException(TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result");
    +
        def drop_partition(self, db_name, tbl_name, part_vals, deleteData):
          """
          Parameters:
    @@ -1667,6 +1841,48 @@ class Client(fb303.FacebookService.Clien
            raise result.o2
          raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result");

    + def drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_vals
    + - deleteData
    + - environment_context
    + """
    + self.send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
    + return self.recv_drop_partition_with_environment_context()
    +
    + def send_drop_partition_with_environment_context(self, db_name, tbl_name, part_vals, deleteData, environment_context):
    + self._oprot.writeMessageBegin('drop_partition_with_environment_context', TMessageType.CALL, self._seqid)
    + args = drop_partition_with_environment_context_args()
    + args.db_name = db_name
    + args.tbl_name = tbl_name
    + args.part_vals = part_vals
    + args.deleteData = deleteData
    + args.environment_context = environment_context
    + args.write(self._oprot)
    + self._oprot.writeMessageEnd()
    + self._oprot.trans.flush()
    +
    + def recv_drop_partition_with_environment_context(self, ):
    + (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    + if mtype == TMessageType.EXCEPTION:
    + x = TApplicationException()
    + x.read(self._iprot)
    + self._iprot.readMessageEnd()
    + raise x
    + result = drop_partition_with_environment_context_result()
    + result.read(self._iprot)
    + self._iprot.readMessageEnd()
    + if result.success is not None:
    + return result.success
    + if result.o1 is not None:
    + raise result.o1
    + if result.o2 is not None:
    + raise result.o2
    + raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result");
    +
        def drop_partition_by_name(self, db_name, tbl_name, part_name, deleteData):
          """
          Parameters:
    @@ -1707,6 +1923,48 @@ class Client(fb303.FacebookService.Clien
            raise result.o2
          raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");

    + def drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
    + """
    + Parameters:
    + - db_name
    + - tbl_name
    + - part_name
    + - deleteData
    + - environment_context
    + """
    + self.send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
    + return self.recv_drop_partition_by_name_with_environment_context()
    +
    + def send_drop_partition_by_name_with_environment_context(self, db_name, tbl_name, part_name, deleteData, environment_context):
    + self._oprot.writeMessageBegin('drop_partition_by_name_with_environment_context', TMessageType.CALL, self._seqid)
    + args = drop_partition_by_name_with_environment_context_args()
    + args.db_name = db_name
    + args.tbl_name = tbl_name
    + args.part_name = part_name
    + args.deleteData = deleteData
    + args.environment_context = environment_context
    + args.write(self._oprot)
    + self._oprot.writeMessageEnd()
    + self._oprot.trans.flush()
    +
    + def recv_drop_partition_by_name_with_environment_context(self, ):
    + (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    + if mtype == TMessageType.EXCEPTION:
    + x = TApplicationException()
    + x.read(self._iprot)
    + self._iprot.readMessageEnd()
    + raise x
    + result = drop_partition_by_name_with_environment_context_result()
    + result.read(self._iprot)
    + self._iprot.readMessageEnd()
    + if result.success is not None:
    + return result.success
    + if result.o1 is not None:
    + raise result.o1
    + if result.o2 is not None:
    + raise result.o2
    + raise TApplicationException(TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result");
    +
        def get_partition(self, db_name, tbl_name, part_vals):
          """
          Parameters:
    @@ -3447,6 +3705,7 @@ class Processor(fb303.FacebookService.Pr
          self._processMap["create_table"] = Processor.process_create_table
          self._processMap["create_table_with_environment_context"] = Processor.process_create_table_with_environment_context
          self._processMap["drop_table"] = Processor.process_drop_table
    + self._processMap["drop_table_with_environment_context"] = Processor.process_drop_table_with_environment_context
          self._processMap["get_tables"] = Processor.process_get_tables
          self._processMap["get_all_tables"] = Processor.process_get_all_tables
          self._processMap["get_table"] = Processor.process_get_table
    @@ -3458,9 +3717,13 @@ class Processor(fb303.FacebookService.Pr
          self._processMap["add_partition_with_environment_context"] = Processor.process_add_partition_with_environment_context
          self._processMap["add_partitions"] = Processor.process_add_partitions
          self._processMap["append_partition"] = Processor.process_append_partition
    + self._processMap["append_partition_with_environment_context"] = Processor.process_append_partition_with_environment_context
          self._processMap["append_partition_by_name"] = Processor.process_append_partition_by_name
    + self._processMap["append_partition_by_name_with_environment_context"] = Processor.process_append_partition_by_name_with_environment_context
          self._processMap["drop_partition"] = Processor.process_drop_partition
    + self._processMap["drop_partition_with_environment_context"] = Processor.process_drop_partition_with_environment_context
          self._processMap["drop_partition_by_name"] = Processor.process_drop_partition_by_name
    + self._processMap["drop_partition_by_name_with_environment_context"] = Processor.process_drop_partition_by_name_with_environment_context
          self._processMap["get_partition"] = Processor.process_get_partition
          self._processMap["get_partition_with_auth"] = Processor.process_get_partition_with_auth
          self._processMap["get_partition_by_name"] = Processor.process_get_partition_by_name
    @@ -3775,6 +4038,22 @@ class Processor(fb303.FacebookService.Pr
          oprot.writeMessageEnd()
          oprot.trans.flush()

    + def process_drop_table_with_environment_context(self, seqid, iprot, oprot):
    + args = drop_table_with_environment_context_args()
    + args.read(iprot)
    + iprot.readMessageEnd()
    + result = drop_table_with_environment_context_result()
    + try:
    + self._handler.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context)
    + except NoSuchObjectException as o1:
    + result.o1 = o1
    + except MetaException as o3:
    + result.o3 = o3
    + oprot.writeMessageBegin("drop_table_with_environment_context", TMessageType.REPLY, seqid)
    + result.write(oprot)
    + oprot.writeMessageEnd()
    + oprot.trans.flush()
    +
        def process_get_tables(self, seqid, iprot, oprot):
          args = get_tables_args()
          args.read(iprot)
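    On the server side the real implementation is added to HiveMetaStore's handler in Java; purely to illustrate how the Processor entry above dispatches the new call, here is a hypothetical Python stub handler. The class name, logging, and behaviour are assumptions for illustration, not part of the commit.

        # Hypothetical handler stub (not part of this commit) showing what the
        # Processor entry above dispatches to; the real handler lives in
        # HiveMetaStore.java.
        from hive_metastore import ThriftHiveMetastore
        from hive_metastore.ttypes import NoSuchObjectException

        class StubHandler(ThriftHiveMetastore.Iface):
            def drop_table_with_environment_context(self, dbname, name, deleteData,
                                                    environment_context):
                # environment_context is assumed to carry a 'properties' map of
                # caller-supplied hints; this stub just logs and rejects every table.
                props = getattr(environment_context, 'properties', None) or {}
                print 'drop %s.%s deleteData=%s props=%r' % (dbname, name, deleteData, props)
                raise NoSuchObjectException('%s.%s does not exist' % (dbname, name))

        # The generated Processor routes "drop_table_with_environment_context"
        # messages to the handler method of the same name.
        processor = ThriftHiveMetastore.Processor(StubHandler())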
    @@ -3959,6 +4238,24 @@ class Processor(fb303.FacebookService.Pr
          oprot.writeMessageEnd()
          oprot.trans.flush()

    + def process_append_partition_with_environment_context(self, seqid, iprot, oprot):
    + args = append_partition_with_environment_context_args()
    + args.read(iprot)
    + iprot.readMessageEnd()
    + result = append_partition_with_environment_context_result()
    + try:
    + result.success = self._handler.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context)
    + except InvalidObjectException as o1:
    + result.o1 = o1
    + except AlreadyExistsException as o2:
    + result.o2 = o2
    + except MetaException as o3:
    + result.o3 = o3
    + oprot.writeMessageBegin("append_partition_with_environment_context", TMessageType.REPLY, seqid)
    + result.write(oprot)
    + oprot.writeMessageEnd()
    + oprot.trans.flush()
    +
        def process_append_partition_by_name(self, seqid, iprot, oprot):
          args = append_partition_by_name_args()
          args.read(iprot)
    @@ -3977,6 +4274,24 @@ class Processor(fb303.FacebookService.Pr
          oprot.writeMessageEnd()
          oprot.trans.flush()

    + def process_append_partition_by_name_with_environment_context(self, seqid, iprot, oprot):
    + args = append_partition_by_name_with_environment_context_args()
    + args.read(iprot)
    + iprot.readMessageEnd()
    + result = append_partition_by_name_with_environment_context_result()
    + try:
    + result.success = self._handler.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context)
    + except InvalidObjectException as o1:
    + result.o1 = o1
    + except AlreadyExistsException as o2:
    + result.o2 = o2
    + except MetaException as o3:
    + result.o3 = o3
    + oprot.writeMessageBegin("append_partition_by_name_with_environment_context", TMessageType.REPLY, seqid)
    + result.write(oprot)
    + oprot.writeMessageEnd()
    + oprot.trans.flush()
    +
        def process_drop_partition(self, seqid, iprot, oprot):
          args = drop_partition_args()
          args.read(iprot)
    @@ -3993,27 +4308,59 @@ class Processor(fb303.FacebookService.Pr
          oprot.writeMessageEnd()
          oprot.trans.flush()

    - def process_drop_partition_by_name(self, seqid, iprot, oprot):
    - args = drop_partition_by_name_args()
    + def process_drop_partition_with_environment_context(self, seqid, iprot, oprot):
    + args = drop_partition_with_environment_context_args()
          args.read(iprot)
          iprot.readMessageEnd()
    - result = drop_partition_by_name_result()
    + result = drop_partition_with_environment_context_result()
          try:
    - result.success = self._handler.drop_partition_by_name(args.db_name, args.tbl_name, args.part_name, args.deleteData)
    + result.success = self._handler.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context)
          except NoSuchObjectException as o1:
            result.o1 = o1
          except MetaException as o2:
            result.o2 = o2
    - oprot.writeMessageBegin("drop_partition_by_name", TMessageType.REPLY, seqid)
    + oprot.writeMessageBegin("drop_partition_with_environment_context", TMessageType.REPLY, seqid)
          result.write(oprot)
          oprot.writeMessageEnd()
          oprot.trans.flush()

    - def process_get_partition(self, seqid, iprot, oprot):
    - args = get_partition_args()
    + def process_drop_partition_by_name(self, seqid, iprot, oprot):
    + args = drop_partition_by_name_args()
          args.read(iprot)
          iprot.readMessageEnd()
    - result = get_partition_result()
    + result = drop_partition_by_name_result()
    + try:
    + result.success = self._handler.drop_partition_by_name(args.db_name, args.tbl_name, args.part_name, args.deleteData)
    + except NoSuchObjectException as o1:
    + result.o1 = o1
    + except MetaException as o2:
    + result.o2 = o2
    + oprot.writeMessageBegin("drop_partition_by_name", TMessageType.REPLY, seqid)
    + result.write(oprot)
    + oprot.writeMessageEnd()
    + oprot.trans.flush()
    +
    + def process_drop_partition_by_name_with_environment_context(self, seqid, iprot, oprot):
    + args = drop_partition_by_name_with_environment_context_args()
    + args.read(iprot)
    + iprot.readMessageEnd()
    + result = drop_partition_by_name_with_environment_context_result()
    + try:
    + result.success = self._handler.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context)
    + except NoSuchObjectException as o1:
    + result.o1 = o1
    + except MetaException as o2:
    + result.o2 = o2
    + oprot.writeMessageBegin("drop_partition_by_name_with_environment_context", TMessageType.REPLY, seqid)
    + result.write(oprot)
    + oprot.writeMessageEnd()
    + oprot.trans.flush()
    +
    + def process_get_partition(self, seqid, iprot, oprot):
    + args = get_partition_args()
    + args.read(iprot)
    + iprot.readMessageEnd()
    + result = get_partition_result()
          try:
            result.success = self._handler.get_partition(args.db_name, args.tbl_name, args.part_vals)
          except MetaException as o1:
    @@ -7070,6 +7417,177 @@ class drop_table_result:
        def __ne__(self, other):
          return not (self == other)

    +class drop_table_with_environment_context_args:
    + """
    + Attributes:
    + - dbname
    + - name
    + - deleteData
    + - environment_context
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRING, 'dbname', None, None, ), # 1
    + (2, TType.STRING, 'name', None, None, ), # 2
    + (3, TType.BOOL, 'deleteData', None, None, ), # 3
    + (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
    + )
    +
    + def __init__(self, dbname=None, name=None, deleteData=None, environment_context=None,):
    + self.dbname = dbname
    + self.name = name
    + self.deleteData = deleteData
    + self.environment_context = environment_context
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRING:
    + self.dbname = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRING:
    + self.name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.BOOL:
    + self.deleteData = iprot.readBool();
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.STRUCT:
    + self.environment_context = EnvironmentContext()
    + self.environment_context.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('drop_table_with_environment_context_args')
    + if self.dbname is not None:
    + oprot.writeFieldBegin('dbname', TType.STRING, 1)
    + oprot.writeString(self.dbname)
    + oprot.writeFieldEnd()
    + if self.name is not None:
    + oprot.writeFieldBegin('name', TType.STRING, 2)
    + oprot.writeString(self.name)
    + oprot.writeFieldEnd()
    + if self.deleteData is not None:
    + oprot.writeFieldBegin('deleteData', TType.BOOL, 3)
    + oprot.writeBool(self.deleteData)
    + oprot.writeFieldEnd()
    + if self.environment_context is not None:
    + oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
    + self.environment_context.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class drop_table_with_environment_context_result:
    + """
    + Attributes:
    + - o1
    + - o3
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 2
    + )
    +
    + def __init__(self, o1=None, o3=None,):
    + self.o1 = o1
    + self.o3 = o3
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRUCT:
    + self.o1 = NoSuchObjectException()
    + self.o1.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.o3 = MetaException()
    + self.o3.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('drop_table_with_environment_context_result')
    + if self.o1 is not None:
    + oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    + self.o1.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o3 is not None:
    + oprot.writeFieldBegin('o3', TType.STRUCT, 2)
    + self.o3.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
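
For illustration, a minimal sketch of how a client could build and serialize the new drop_table_with_environment_context_args struct; the hive_metastore import paths and the literal values are assumptions for illustration, while the struct shape and its write() behaviour come from the generated code above:

    # Minimal sketch, assuming the generated package is importable as hive_metastore
    # (import paths are an assumption; the struct definitions are from the diff above).
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from hive_metastore import ThriftHiveMetastore
    from hive_metastore.ttypes import EnvironmentContext

    # Field ids 1-4 match drop_table_with_environment_context_args.thrift_spec.
    args = ThriftHiveMetastore.drop_table_with_environment_context_args(
        dbname='default',
        name='example_table',                      # hypothetical table name
        deleteData=True,
        environment_context=EnvironmentContext())  # fields set as needed per ttypes

    # Serialize through the plain (non-accelerated) binary protocol path.
    buf = TTransport.TMemoryBuffer()
    oprot = TBinaryProtocol.TBinaryProtocol(buf)
    args.write(oprot)
    assert len(buf.getvalue()) > 0   # encodes fields 1-4, then a field stop
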
      class get_tables_args:
        """
        Attributes:
    @@ -8919,25 +9437,28 @@ class append_partition_result:
        def __ne__(self, other):
          return not (self == other)

    -class append_partition_by_name_args:
    +class append_partition_with_environment_context_args:
        """
        Attributes:
         - db_name
         - tbl_name
    - - part_name
    + - part_vals
    + - environment_context
        """

        thrift_spec = (
          None, # 0
          (1, TType.STRING, 'db_name', None, None, ), # 1
          (2, TType.STRING, 'tbl_name', None, None, ), # 2
    - (3, TType.STRING, 'part_name', None, None, ), # 3
    + (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    + (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
        )

    - def __init__(self, db_name=None, tbl_name=None, part_name=None,):
    + def __init__(self, db_name=None, tbl_name=None, part_vals=None, environment_context=None,):
          self.db_name = db_name
          self.tbl_name = tbl_name
    - self.part_name = part_name
    + self.part_vals = part_vals
    + self.environment_context = environment_context

        def read(self, iprot):
          if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    @@ -8959,8 +9480,19 @@ class append_partition_by_name_args:
              else:
                iprot.skip(ftype)
            elif fid == 3:
    - if ftype == TType.STRING:
    - self.part_name = iprot.readString();
    + if ftype == TType.LIST:
    + self.part_vals = []
    + (_etype316, _size313) = iprot.readListBegin()
    + for _i317 in xrange(_size313):
    + _elem318 = iprot.readString();
    + self.part_vals.append(_elem318)
    + iprot.readListEnd()
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.STRUCT:
    + self.environment_context = EnvironmentContext()
    + self.environment_context.read(iprot)
              else:
                iprot.skip(ftype)
            else:
    @@ -8972,7 +9504,7 @@ class append_partition_by_name_args:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('append_partition_by_name_args')
    + oprot.writeStructBegin('append_partition_with_environment_context_args')
          if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name)
    @@ -8981,9 +9513,16 @@ class append_partition_by_name_args:
            oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
            oprot.writeString(self.tbl_name)
            oprot.writeFieldEnd()
    - if self.part_name is not None:
    - oprot.writeFieldBegin('part_name', TType.STRING, 3)
    - oprot.writeString(self.part_name)
    + if self.part_vals is not None:
    + oprot.writeFieldBegin('part_vals', TType.LIST, 3)
    + oprot.writeListBegin(TType.STRING, len(self.part_vals))
    + for iter319 in self.part_vals:
    + oprot.writeString(iter319)
    + oprot.writeListEnd()
    + oprot.writeFieldEnd()
    + if self.environment_context is not None:
    + oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
    + self.environment_context.write(oprot)
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
          oprot.writeStructEnd()
    @@ -9003,7 +9542,7 @@ class append_partition_by_name_args:
        def __ne__(self, other):
          return not (self == other)

    -class append_partition_by_name_result:
    +class append_partition_with_environment_context_result:
        """
        Attributes:
         - success
    @@ -9013,17 +9552,798 @@ class append_partition_by_name_result:
        """

        thrift_spec = (
    - (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    - (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    - (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    - (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    + (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    + (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    + (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    + )
    +
    + def __init__(self, success=None, o1=None, o2=None, o3=None,):
    + self.success = success
    + self.o1 = o1
    + self.o2 = o2
    + self.o3 = o3
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 0:
    + if ftype == TType.STRUCT:
    + self.success = Partition()
    + self.success.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 1:
    + if ftype == TType.STRUCT:
    + self.o1 = InvalidObjectException()
    + self.o1.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.o2 = AlreadyExistsException()
    + self.o2.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.STRUCT:
    + self.o3 = MetaException()
    + self.o3.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('append_partition_with_environment_context_result')
    + if self.success is not None:
    + oprot.writeFieldBegin('success', TType.STRUCT, 0)
    + self.success.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o1 is not None:
    + oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    + self.o1.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o2 is not None:
    + oprot.writeFieldBegin('o2', TType.STRUCT, 2)
    + self.o2.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o3 is not None:
    + oprot.writeFieldBegin('o3', TType.STRUCT, 3)
    + self.o3.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
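
For illustration, the append_partition_with_environment_context variant carries the partition values as a list<string> (field 3) and the EnvironmentContext as field 4; a minimal sketch of constructing its args struct, with the same assumed import paths and hypothetical names as the sketch above:

    from hive_metastore import ThriftHiveMetastore
    from hive_metastore.ttypes import EnvironmentContext

    # Field ids follow append_partition_with_environment_context_args.thrift_spec:
    # 1 db_name, 2 tbl_name, 3 part_vals (list<string>), 4 environment_context.
    args = ThriftHiveMetastore.append_partition_with_environment_context_args(
        db_name='default',
        tbl_name='example_table',        # hypothetical
        part_vals=['2013', '02', '12'],  # one value per partition key
        environment_context=EnvironmentContext())
    # args.write(oprot) would then serialize it exactly as in the previous sketch.
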
    +class append_partition_by_name_args:
    + """
    + Attributes:
    + - db_name
    + - tbl_name
    + - part_name
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRING, 'db_name', None, None, ), # 1
    + (2, TType.STRING, 'tbl_name', None, None, ), # 2
    + (3, TType.STRING, 'part_name', None, None, ), # 3
    + )
    +
    + def __init__(self, db_name=None, tbl_name=None, part_name=None,):
    + self.db_name = db_name
    + self.tbl_name = tbl_name
    + self.part_name = part_name
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRING:
    + self.db_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRING:
    + self.tbl_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.STRING:
    + self.part_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('append_partition_by_name_args')
    + if self.db_name is not None:
    + oprot.writeFieldBegin('db_name', TType.STRING, 1)
    + oprot.writeString(self.db_name)
    + oprot.writeFieldEnd()
    + if self.tbl_name is not None:
    + oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
    + oprot.writeString(self.tbl_name)
    + oprot.writeFieldEnd()
    + if self.part_name is not None:
    + oprot.writeFieldBegin('part_name', TType.STRING, 3)
    + oprot.writeString(self.part_name)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class append_partition_by_name_result:
    + """
    + Attributes:
    + - success
    + - o1
    + - o2
    + - o3
    + """
    +
    + thrift_spec = (
    + (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    + (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    + (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    + )
    +
    + def __init__(self, success=None, o1=None, o2=None, o3=None,):
    + self.success = success
    + self.o1 = o1
    + self.o2 = o2
    + self.o3 = o3
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 0:
    + if ftype == TType.STRUCT:
    + self.success = Partition()
    + self.success.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 1:
    + if ftype == TType.STRUCT:
    + self.o1 = InvalidObjectException()
    + self.o1.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.o2 = AlreadyExistsException()
    + self.o2.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.STRUCT:
    + self.o3 = MetaException()
    + self.o3.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('append_partition_by_name_result')
    + if self.success is not None:
    + oprot.writeFieldBegin('success', TType.STRUCT, 0)
    + self.success.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o1 is not None:
    + oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    + self.o1.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o2 is not None:
    + oprot.writeFieldBegin('o2', TType.STRUCT, 2)
    + self.o2.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o3 is not None:
    + oprot.writeFieldBegin('o3', TType.STRUCT, 3)
    + self.o3.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class append_partition_by_name_with_environment_context_args:
    + """
    + Attributes:
    + - db_name
    + - tbl_name
    + - part_name
    + - environment_context
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRING, 'db_name', None, None, ), # 1
    + (2, TType.STRING, 'tbl_name', None, None, ), # 2
    + (3, TType.STRING, 'part_name', None, None, ), # 3
    + (4, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 4
    + )
    +
    + def __init__(self, db_name=None, tbl_name=None, part_name=None, environment_context=None,):
    + self.db_name = db_name
    + self.tbl_name = tbl_name
    + self.part_name = part_name
    + self.environment_context = environment_context
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRING:
    + self.db_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRING:
    + self.tbl_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.STRING:
    + self.part_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.STRUCT:
    + self.environment_context = EnvironmentContext()
    + self.environment_context.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('append_partition_by_name_with_environment_context_args')
    + if self.db_name is not None:
    + oprot.writeFieldBegin('db_name', TType.STRING, 1)
    + oprot.writeString(self.db_name)
    + oprot.writeFieldEnd()
    + if self.tbl_name is not None:
    + oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
    + oprot.writeString(self.tbl_name)
    + oprot.writeFieldEnd()
    + if self.part_name is not None:
    + oprot.writeFieldBegin('part_name', TType.STRING, 3)
    + oprot.writeString(self.part_name)
    + oprot.writeFieldEnd()
    + if self.environment_context is not None:
    + oprot.writeFieldBegin('environment_context', TType.STRUCT, 4)
    + self.environment_context.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class append_partition_by_name_with_environment_context_result:
    + """
    + Attributes:
    + - success
    + - o1
    + - o2
    + - o3
    + """
    +
    + thrift_spec = (
    + (0, TType.STRUCT, 'success', (Partition, Partition.thrift_spec), None, ), # 0
    + (1, TType.STRUCT, 'o1', (InvalidObjectException, InvalidObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o2', (AlreadyExistsException, AlreadyExistsException.thrift_spec), None, ), # 2
    + (3, TType.STRUCT, 'o3', (MetaException, MetaException.thrift_spec), None, ), # 3
    + )
    +
    + def __init__(self, success=None, o1=None, o2=None, o3=None,):
    + self.success = success
    + self.o1 = o1
    + self.o2 = o2
    + self.o3 = o3
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 0:
    + if ftype == TType.STRUCT:
    + self.success = Partition()
    + self.success.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 1:
    + if ftype == TType.STRUCT:
    + self.o1 = InvalidObjectException()
    + self.o1.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.o2 = AlreadyExistsException()
    + self.o2.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.STRUCT:
    + self.o3 = MetaException()
    + self.o3.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('append_partition_by_name_with_environment_context_result')
    + if self.success is not None:
    + oprot.writeFieldBegin('success', TType.STRUCT, 0)
    + self.success.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o1 is not None:
    + oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    + self.o1.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o2 is not None:
    + oprot.writeFieldBegin('o2', TType.STRUCT, 2)
    + self.o2.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o3 is not None:
    + oprot.writeFieldBegin('o3', TType.STRUCT, 3)
    + self.o3.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
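
Similarly, the by-name variant addresses the partition with a single encoded name string (field 3) rather than a value list, again with the context appended as field 4; a short sketch under the same assumptions:

    from hive_metastore import ThriftHiveMetastore
    from hive_metastore.ttypes import EnvironmentContext

    args = ThriftHiveMetastore.append_partition_by_name_with_environment_context_args(
        db_name='default',
        tbl_name='example_table',     # hypothetical
        part_name='ds=2013-02-12',    # hypothetical encoded partition name
        environment_context=EnvironmentContext())
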
    +class drop_partition_args:
    + """
    + Attributes:
    + - db_name
    + - tbl_name
    + - part_vals
    + - deleteData
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRING, 'db_name', None, None, ), # 1
    + (2, TType.STRING, 'tbl_name', None, None, ), # 2
    + (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    + (4, TType.BOOL, 'deleteData', None, None, ), # 4
    + )
    +
    + def __init__(self, db_name=None, tbl_name=None, part_vals=None, deleteData=None,):
    + self.db_name = db_name
    + self.tbl_name = tbl_name
    + self.part_vals = part_vals
    + self.deleteData = deleteData
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRING:
    + self.db_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRING:
    + self.tbl_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.LIST:
    + self.part_vals = []
    + (_etype323, _size320) = iprot.readListBegin()
    + for _i324 in xrange(_size320):
    + _elem325 = iprot.readString();
    + self.part_vals.append(_elem325)
    + iprot.readListEnd()
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.BOOL:
    + self.deleteData = iprot.readBool();
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('drop_partition_args')
    + if self.db_name is not None:
    + oprot.writeFieldBegin('db_name', TType.STRING, 1)
    + oprot.writeString(self.db_name)
    + oprot.writeFieldEnd()
    + if self.tbl_name is not None:
    + oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
    + oprot.writeString(self.tbl_name)
    + oprot.writeFieldEnd()
    + if self.part_vals is not None:
    + oprot.writeFieldBegin('part_vals', TType.LIST, 3)
    + oprot.writeListBegin(TType.STRING, len(self.part_vals))
    + for iter326 in self.part_vals:
    + oprot.writeString(iter326)
    + oprot.writeListEnd()
    + oprot.writeFieldEnd()
    + if self.deleteData is not None:
    + oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
    + oprot.writeBool(self.deleteData)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class drop_partition_result:
    + """
    + Attributes:
    + - success
    + - o1
    + - o2
    + """
    +
    + thrift_spec = (
    + (0, TType.BOOL, 'success', None, None, ), # 0
    + (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
    + )
    +
    + def __init__(self, success=None, o1=None, o2=None,):
    + self.success = success
    + self.o1 = o1
    + self.o2 = o2
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 0:
    + if ftype == TType.BOOL:
    + self.success = iprot.readBool();
    + else:
    + iprot.skip(ftype)
    + elif fid == 1:
    + if ftype == TType.STRUCT:
    + self.o1 = NoSuchObjectException()
    + self.o1.read(iprot)
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRUCT:
    + self.o2 = MetaException()
    + self.o2.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('drop_partition_result')
    + if self.success is not None:
    + oprot.writeFieldBegin('success', TType.BOOL, 0)
    + oprot.writeBool(self.success)
    + oprot.writeFieldEnd()
    + if self.o1 is not None:
    + oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    + self.o1.write(oprot)
    + oprot.writeFieldEnd()
    + if self.o2 is not None:
    + oprot.writeFieldBegin('o2', TType.STRUCT, 2)
    + self.o2.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class drop_partition_with_environment_context_args:
    + """
    + Attributes:
    + - db_name
    + - tbl_name
    + - part_vals
    + - deleteData
    + - environment_context
    + """
    +
    + thrift_spec = (
    + None, # 0
    + (1, TType.STRING, 'db_name', None, None, ), # 1
    + (2, TType.STRING, 'tbl_name', None, None, ), # 2
    + (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    + (4, TType.BOOL, 'deleteData', None, None, ), # 4
    + (5, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 5
    + )
    +
    + def __init__(self, db_name=None, tbl_name=None, part_vals=None, deleteData=None, environment_context=None,):
    + self.db_name = db_name
    + self.tbl_name = tbl_name
    + self.part_vals = part_vals
    + self.deleteData = deleteData
    + self.environment_context = environment_context
    +
    + def read(self, iprot):
    + if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    + fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
    + return
    + iprot.readStructBegin()
    + while True:
    + (fname, ftype, fid) = iprot.readFieldBegin()
    + if ftype == TType.STOP:
    + break
    + if fid == 1:
    + if ftype == TType.STRING:
    + self.db_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 2:
    + if ftype == TType.STRING:
    + self.tbl_name = iprot.readString();
    + else:
    + iprot.skip(ftype)
    + elif fid == 3:
    + if ftype == TType.LIST:
    + self.part_vals = []
    + (_etype330, _size327) = iprot.readListBegin()
    + for _i331 in xrange(_size327):
    + _elem332 = iprot.readString();
    + self.part_vals.append(_elem332)
    + iprot.readListEnd()
    + else:
    + iprot.skip(ftype)
    + elif fid == 4:
    + if ftype == TType.BOOL:
    + self.deleteData = iprot.readBool();
    + else:
    + iprot.skip(ftype)
    + elif fid == 5:
    + if ftype == TType.STRUCT:
    + self.environment_context = EnvironmentContext()
    + self.environment_context.read(iprot)
    + else:
    + iprot.skip(ftype)
    + else:
    + iprot.skip(ftype)
    + iprot.readFieldEnd()
    + iprot.readStructEnd()
    +
    + def write(self, oprot):
    + if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
    + oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
    + return
    + oprot.writeStructBegin('drop_partition_with_environment_context_args')
    + if self.db_name is not None:
    + oprot.writeFieldBegin('db_name', TType.STRING, 1)
    + oprot.writeString(self.db_name)
    + oprot.writeFieldEnd()
    + if self.tbl_name is not None:
    + oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
    + oprot.writeString(self.tbl_name)
    + oprot.writeFieldEnd()
    + if self.part_vals is not None:
    + oprot.writeFieldBegin('part_vals', TType.LIST, 3)
    + oprot.writeListBegin(TType.STRING, len(self.part_vals))
    + for iter333 in self.part_vals:
    + oprot.writeString(iter333)
    + oprot.writeListEnd()
    + oprot.writeFieldEnd()
    + if self.deleteData is not None:
    + oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
    + oprot.writeBool(self.deleteData)
    + oprot.writeFieldEnd()
    + if self.environment_context is not None:
    + oprot.writeFieldBegin('environment_context', TType.STRUCT, 5)
    + self.environment_context.write(oprot)
    + oprot.writeFieldEnd()
    + oprot.writeFieldStop()
    + oprot.writeStructEnd()
    +
    + def validate(self):
    + return
    +
    +
    + def __repr__(self):
    + L = ['%s=%r' % (key, value)
    + for key, value in self.__dict__.iteritems()]
    + return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    +
    + def __eq__(self, other):
    + return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    +
    + def __ne__(self, other):
    + return not (self == other)
    +
    +class drop_partition_with_environment_context_result:
    + """
    + Attributes:
    + - success
    + - o1
    + - o2
    + """
    +
    + thrift_spec = (
    + (0, TType.BOOL, 'success', None, None, ), # 0
    + (1, TType.STRUCT, 'o1', (NoSuchObjectException, NoSuchObjectException.thrift_spec), None, ), # 1
    + (2, TType.STRUCT, 'o2', (MetaException, MetaException.thrift_spec), None, ), # 2
        )

    - def __init__(self, success=None, o1=None, o2=None, o3=None,):
    + def __init__(self, success=None, o1=None, o2=None,):
          self.success = success
          self.o1 = o1
          self.o2 = o2
    - self.o3 = o3

        def read(self, iprot):
          if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    @@ -9035,29 +10355,22 @@ class append_partition_by_name_result:
            if ftype == TType.STOP:
              break
            if fid == 0:
    - if ftype == TType.STRUCT:
    - self.success = Partition()
    - self.success.read(iprot)
    + if ftype == TType.BOOL:
    + self.success = iprot.readBool();
              else:
                iprot.skip(ftype)
            elif fid == 1:
              if ftype == TType.STRUCT:
    - self.o1 = InvalidObjectException()
    + self.o1 = NoSuchObjectException()
                self.o1.read(iprot)
              else:
                iprot.skip(ftype)
            elif fid == 2:
              if ftype == TType.STRUCT:
    - self.o2 = AlreadyExistsException()
    + self.o2 = MetaException()
                self.o2.read(iprot)
              else:
                iprot.skip(ftype)
    - elif fid == 3:
    - if ftype == TType.STRUCT:
    - self.o3 = MetaException()
    - self.o3.read(iprot)
    - else:
    - iprot.skip(ftype)
            else:
              iprot.skip(ftype)
            iprot.readFieldEnd()
    @@ -9067,10 +10380,10 @@ class append_partition_by_name_result:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('append_partition_by_name_result')
    + oprot.writeStructBegin('drop_partition_with_environment_context_result')
          if self.success is not None:
    - oprot.writeFieldBegin('success', TType.STRUCT, 0)
    - self.success.write(oprot)
    + oprot.writeFieldBegin('success', TType.BOOL, 0)
    + oprot.writeBool(self.success)
            oprot.writeFieldEnd()
          if self.o1 is not None:
            oprot.writeFieldBegin('o1', TType.STRUCT, 1)
    @@ -9080,10 +10393,6 @@ class append_partition_by_name_result:
            oprot.writeFieldBegin('o2', TType.STRUCT, 2)
            self.o2.write(oprot)
            oprot.writeFieldEnd()
    - if self.o3 is not None:
    - oprot.writeFieldBegin('o3', TType.STRUCT, 3)
    - self.o3.write(oprot)
    - oprot.writeFieldEnd()
          oprot.writeFieldStop()
          oprot.writeStructEnd()

    @@ -9102,12 +10411,12 @@ class append_partition_by_name_result:
        def __ne__(self, other):
          return not (self == other)
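
On the server side, the generated process_drop_partition_with_environment_context method shown earlier parses these args and forwards them to the handler, storing the boolean return value in result.success; a minimal handler stub follows, with the class name and print output purely illustrative:

    # Hypothetical handler implementation; only the method signature and the
    # bool return type are taken from the generated processor/result code above.
    class ExampleMetastoreHandler(object):
        def drop_partition_with_environment_context(self, db_name, tbl_name,
                                                     part_vals, deleteData,
                                                     environment_context):
            # environment_context is an EnvironmentContext struct (or None) that
            # the caller threads through with the drop request.
            print('drop %s.%s %r deleteData=%s' %
                  (db_name, tbl_name, part_vals, deleteData))
            return True   # becomes the TType.BOOL 'success' field of the result
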

    -class drop_partition_args:
    +class drop_partition_by_name_args:
        """
        Attributes:
         - db_name
         - tbl_name
    - - part_vals
    + - part_name
         - deleteData
        """

    @@ -9115,14 +10424,14 @@ class drop_partition_args:
          None, # 0
          (1, TType.STRING, 'db_name', None, None, ), # 1
          (2, TType.STRING, 'tbl_name', None, None, ), # 2
    - (3, TType.LIST, 'part_vals', (TType.STRING,None), None, ), # 3
    + (3, TType.STRING, 'part_name', None, None, ), # 3
          (4, TType.BOOL, 'deleteData', None, None, ), # 4
        )

    - def __init__(self, db_name=None, tbl_name=None, part_vals=None, deleteData=None,):
    + def __init__(self, db_name=None, tbl_name=None, part_name=None, deleteData=None,):
          self.db_name = db_name
          self.tbl_name = tbl_name
    - self.part_vals = part_vals
    + self.part_name = part_name
          self.deleteData = deleteData

        def read(self, iprot):
    @@ -9145,13 +10454,8 @@ class drop_partition_args:
              else:
                iprot.skip(ftype)
            elif fid == 3:
    - if ftype == TType.LIST:
    - self.part_vals = []
    - (_etype316, _size313) = iprot.readListBegin()
    - for _i317 in xrange(_size313):
    - _elem318 = iprot.readString();
    - self.part_vals.append(_elem318)
    - iprot.readListEnd()
    + if ftype == TType.STRING:
    + self.part_name = iprot.readString();
              else:
                iprot.skip(ftype)
            elif fid == 4:
    @@ -9168,7 +10472,7 @@ class drop_partition_args:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('drop_partition_args')
    + oprot.writeStructBegin('drop_partition_by_name_args')
          if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name)
    @@ -9177,12 +10481,9 @@ class drop_partition_args:
            oprot.writeFieldBegin('tbl_name', TType.STRING, 2)
            oprot.writeString(self.tbl_name)
            oprot.writeFieldEnd()
    - if self.part_vals is not None:
    - oprot.writeFieldBegin('part_vals', TType.LIST, 3)
    - oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter319 in self.part_vals:
    - oprot.writeString(iter319)
    - oprot.writeListEnd()
    + if self.part_name is not None:
    + oprot.writeFieldBegin('part_name', TType.STRING, 3)
    + oprot.writeString(self.part_name)
            oprot.writeFieldEnd()
          if self.deleteData is not None:
            oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
    @@ -9206,7 +10507,7 @@ class drop_partition_args:
        def __ne__(self, other):
          return not (self == other)

    -class drop_partition_result:
    +class drop_partition_by_name_result:
        """
        Attributes:
         - success
    @@ -9260,7 +10561,7 @@ class drop_partition_result:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('drop_partition_result')
    + oprot.writeStructBegin('drop_partition_by_name_result')
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
    @@ -9291,13 +10592,14 @@ class drop_partition_result:
        def __ne__(self, other):
          return not (self == other)

    -class drop_partition_by_name_args:
    +class drop_partition_by_name_with_environment_context_args:
        """
        Attributes:
         - db_name
         - tbl_name
         - part_name
         - deleteData
    + - environment_context
        """

        thrift_spec = (
    @@ -9306,13 +10608,15 @@ class drop_partition_by_name_args:
          (2, TType.STRING, 'tbl_name', None, None, ), # 2
          (3, TType.STRING, 'part_name', None, None, ), # 3
          (4, TType.BOOL, 'deleteData', None, None, ), # 4
    + (5, TType.STRUCT, 'environment_context', (EnvironmentContext, EnvironmentContext.thrift_spec), None, ), # 5
        )

    - def __init__(self, db_name=None, tbl_name=None, part_name=None, deleteData=None,):
    + def __init__(self, db_name=None, tbl_name=None, part_name=None, deleteData=None, environment_context=None,):
          self.db_name = db_name
          self.tbl_name = tbl_name
          self.part_name = part_name
          self.deleteData = deleteData
    + self.environment_context = environment_context

        def read(self, iprot):
          if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
    @@ -9343,6 +10647,12 @@ class drop_partition_by_name_args:
                self.deleteData = iprot.readBool();
              else:
                iprot.skip(ftype)
    + elif fid == 5:
    + if ftype == TType.STRUCT:
    + self.environment_context = EnvironmentContext()
    + self.environment_context.read(iprot)
    + else:
    + iprot.skip(ftype)
            else:
              iprot.skip(ftype)
            iprot.readFieldEnd()
    @@ -9352,7 +10662,7 @@ class drop_partition_by_name_args:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('drop_partition_by_name_args')
    + oprot.writeStructBegin('drop_partition_by_name_with_environment_context_args')
          if self.db_name is not None:
            oprot.writeFieldBegin('db_name', TType.STRING, 1)
            oprot.writeString(self.db_name)
    @@ -9369,6 +10679,10 @@ class drop_partition_by_name_args:
            oprot.writeFieldBegin('deleteData', TType.BOOL, 4)
            oprot.writeBool(self.deleteData)
            oprot.writeFieldEnd()
    + if self.environment_context is not None:
    + oprot.writeFieldBegin('environment_context', TType.STRUCT, 5)
    + self.environment_context.write(oprot)
    + oprot.writeFieldEnd()
          oprot.writeFieldStop()
          oprot.writeStructEnd()

    @@ -9387,7 +10701,7 @@ class drop_partition_by_name_args:
        def __ne__(self, other):
          return not (self == other)

    -class drop_partition_by_name_result:
    +class drop_partition_by_name_with_environment_context_result:
        """
        Attributes:
         - success
    @@ -9441,7 +10755,7 @@ class drop_partition_by_name_result:
          if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
    - oprot.writeStructBegin('drop_partition_by_name_result')
    + oprot.writeStructBegin('drop_partition_by_name_with_environment_context_result')
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.BOOL, 0)
            oprot.writeBool(self.success)
    @@ -9514,10 +10828,10 @@ class get_partition_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype323, _size320) = iprot.readListBegin()
    - for _i324 in xrange(_size320):
    - _elem325 = iprot.readString();
    - self.part_vals.append(_elem325)
    + (_etype337, _size334) = iprot.readListBegin()
    + for _i338 in xrange(_size334):
    + _elem339 = iprot.readString();
    + self.part_vals.append(_elem339)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -9542,8 +10856,8 @@ class get_partition_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter326 in self.part_vals:
    - oprot.writeString(iter326)
    + for iter340 in self.part_vals:
    + oprot.writeString(iter340)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -9698,10 +11012,10 @@ class get_partition_with_auth_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype330, _size327) = iprot.readListBegin()
    - for _i331 in xrange(_size327):
    - _elem332 = iprot.readString();
    - self.part_vals.append(_elem332)
    + (_etype344, _size341) = iprot.readListBegin()
    + for _i345 in xrange(_size341):
    + _elem346 = iprot.readString();
    + self.part_vals.append(_elem346)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -9713,10 +11027,10 @@ class get_partition_with_auth_args:
            elif fid == 5:
              if ftype == TType.LIST:
                self.group_names = []
    - (_etype336, _size333) = iprot.readListBegin()
    - for _i337 in xrange(_size333):
    - _elem338 = iprot.readString();
    - self.group_names.append(_elem338)
    + (_etype350, _size347) = iprot.readListBegin()
    + for _i351 in xrange(_size347):
    + _elem352 = iprot.readString();
    + self.group_names.append(_elem352)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -9741,8 +11055,8 @@ class get_partition_with_auth_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter339 in self.part_vals:
    - oprot.writeString(iter339)
    + for iter353 in self.part_vals:
    + oprot.writeString(iter353)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.user_name is not None:
    @@ -9752,8 +11066,8 @@ class get_partition_with_auth_args:
          if self.group_names is not None:
            oprot.writeFieldBegin('group_names', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.group_names))
    - for iter340 in self.group_names:
    - oprot.writeString(iter340)
    + for iter354 in self.group_names:
    + oprot.writeString(iter354)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -10145,11 +11459,11 @@ class get_partitions_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype344, _size341) = iprot.readListBegin()
    - for _i345 in xrange(_size341):
    - _elem346 = Partition()
    - _elem346.read(iprot)
    - self.success.append(_elem346)
    + (_etype358, _size355) = iprot.readListBegin()
    + for _i359 in xrange(_size355):
    + _elem360 = Partition()
    + _elem360.read(iprot)
    + self.success.append(_elem360)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10178,8 +11492,8 @@ class get_partitions_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter347 in self.success:
    - iter347.write(oprot)
    + for iter361 in self.success:
    + iter361.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -10266,10 +11580,10 @@ class get_partitions_with_auth_args:
            elif fid == 5:
              if ftype == TType.LIST:
                self.group_names = []
    - (_etype351, _size348) = iprot.readListBegin()
    - for _i352 in xrange(_size348):
    - _elem353 = iprot.readString();
    - self.group_names.append(_elem353)
    + (_etype365, _size362) = iprot.readListBegin()
    + for _i366 in xrange(_size362):
    + _elem367 = iprot.readString();
    + self.group_names.append(_elem367)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10302,8 +11616,8 @@ class get_partitions_with_auth_args:
          if self.group_names is not None:
            oprot.writeFieldBegin('group_names', TType.LIST, 5)
            oprot.writeListBegin(TType.STRING, len(self.group_names))
    - for iter354 in self.group_names:
    - oprot.writeString(iter354)
    + for iter368 in self.group_names:
    + oprot.writeString(iter368)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -10355,11 +11669,11 @@ class get_partitions_with_auth_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype358, _size355) = iprot.readListBegin()
    - for _i359 in xrange(_size355):
    - _elem360 = Partition()
    - _elem360.read(iprot)
    - self.success.append(_elem360)
    + (_etype372, _size369) = iprot.readListBegin()
    + for _i373 in xrange(_size369):
    + _elem374 = Partition()
    + _elem374.read(iprot)
    + self.success.append(_elem374)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10388,8 +11702,8 @@ class get_partitions_with_auth_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter361 in self.success:
    - iter361.write(oprot)
    + for iter375 in self.success:
    + iter375.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -10530,10 +11844,10 @@ class get_partition_names_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype365, _size362) = iprot.readListBegin()
    - for _i366 in xrange(_size362):
    - _elem367 = iprot.readString();
    - self.success.append(_elem367)
    + (_etype379, _size376) = iprot.readListBegin()
    + for _i380 in xrange(_size376):
    + _elem381 = iprot.readString();
    + self.success.append(_elem381)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10556,8 +11870,8 @@ class get_partition_names_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
    - for iter368 in self.success:
    - oprot.writeString(iter368)
    + for iter382 in self.success:
    + oprot.writeString(iter382)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o2 is not None:
    @@ -10627,10 +11941,10 @@ class get_partitions_ps_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype372, _size369) = iprot.readListBegin()
    - for _i373 in xrange(_size369):
    - _elem374 = iprot.readString();
    - self.part_vals.append(_elem374)
    + (_etype386, _size383) = iprot.readListBegin()
    + for _i387 in xrange(_size383):
    + _elem388 = iprot.readString();
    + self.part_vals.append(_elem388)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10660,8 +11974,8 @@ class get_partitions_ps_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter375 in self.part_vals:
    - oprot.writeString(iter375)
    + for iter389 in self.part_vals:
    + oprot.writeString(iter389)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.max_parts is not None:
    @@ -10717,11 +12031,11 @@ class get_partitions_ps_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype379, _size376) = iprot.readListBegin()
    - for _i380 in xrange(_size376):
    - _elem381 = Partition()
    - _elem381.read(iprot)
    - self.success.append(_elem381)
    + (_etype393, _size390) = iprot.readListBegin()
    + for _i394 in xrange(_size390):
    + _elem395 = Partition()
    + _elem395.read(iprot)
    + self.success.append(_elem395)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10750,8 +12064,8 @@ class get_partitions_ps_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter382 in self.success:
    - iter382.write(oprot)
    + for iter396 in self.success:
    + iter396.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -10831,10 +12145,10 @@ class get_partitions_ps_with_auth_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype386, _size383) = iprot.readListBegin()
    - for _i387 in xrange(_size383):
    - _elem388 = iprot.readString();
    - self.part_vals.append(_elem388)
    + (_etype400, _size397) = iprot.readListBegin()
    + for _i401 in xrange(_size397):
    + _elem402 = iprot.readString();
    + self.part_vals.append(_elem402)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10851,10 +12165,10 @@ class get_partitions_ps_with_auth_args:
            elif fid == 6:
              if ftype == TType.LIST:
                self.group_names = []
    - (_etype392, _size389) = iprot.readListBegin()
    - for _i393 in xrange(_size389):
    - _elem394 = iprot.readString();
    - self.group_names.append(_elem394)
    + (_etype406, _size403) = iprot.readListBegin()
    + for _i407 in xrange(_size403):
    + _elem408 = iprot.readString();
    + self.group_names.append(_elem408)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10879,8 +12193,8 @@ class get_partitions_ps_with_auth_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter395 in self.part_vals:
    - oprot.writeString(iter395)
    + for iter409 in self.part_vals:
    + oprot.writeString(iter409)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.max_parts is not None:
    @@ -10894,8 +12208,8 @@ class get_partitions_ps_with_auth_args:
          if self.group_names is not None:
            oprot.writeFieldBegin('group_names', TType.LIST, 6)
            oprot.writeListBegin(TType.STRING, len(self.group_names))
    - for iter396 in self.group_names:
    - oprot.writeString(iter396)
    + for iter410 in self.group_names:
    + oprot.writeString(iter410)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -10947,11 +12261,11 @@ class get_partitions_ps_with_auth_result
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype400, _size397) = iprot.readListBegin()
    - for _i401 in xrange(_size397):
    - _elem402 = Partition()
    - _elem402.read(iprot)
    - self.success.append(_elem402)
    + (_etype414, _size411) = iprot.readListBegin()
    + for _i415 in xrange(_size411):
    + _elem416 = Partition()
    + _elem416.read(iprot)
    + self.success.append(_elem416)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -10980,8 +12294,8 @@ class get_partitions_ps_with_auth_result
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter403 in self.success:
    - iter403.write(oprot)
    + for iter417 in self.success:
    + iter417.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -11055,10 +12369,10 @@ class get_partition_names_ps_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype407, _size404) = iprot.readListBegin()
    - for _i408 in xrange(_size404):
    - _elem409 = iprot.readString();
    - self.part_vals.append(_elem409)
    + (_etype421, _size418) = iprot.readListBegin()
    + for _i422 in xrange(_size418):
    + _elem423 = iprot.readString();
    + self.part_vals.append(_elem423)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11088,8 +12402,8 @@ class get_partition_names_ps_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter410 in self.part_vals:
    - oprot.writeString(iter410)
    + for iter424 in self.part_vals:
    + oprot.writeString(iter424)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.max_parts is not None:
    @@ -11145,10 +12459,10 @@ class get_partition_names_ps_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype414, _size411) = iprot.readListBegin()
    - for _i415 in xrange(_size411):
    - _elem416 = iprot.readString();
    - self.success.append(_elem416)
    + (_etype428, _size425) = iprot.readListBegin()
    + for _i429 in xrange(_size425):
    + _elem430 = iprot.readString();
    + self.success.append(_elem430)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11177,8 +12491,8 @@ class get_partition_names_ps_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
    - for iter417 in self.success:
    - oprot.writeString(iter417)
    + for iter431 in self.success:
    + oprot.writeString(iter431)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -11334,11 +12648,11 @@ class get_partitions_by_filter_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype421, _size418) = iprot.readListBegin()
    - for _i422 in xrange(_size418):
    - _elem423 = Partition()
    - _elem423.read(iprot)
    - self.success.append(_elem423)
    + (_etype435, _size432) = iprot.readListBegin()
    + for _i436 in xrange(_size432):
    + _elem437 = Partition()
    + _elem437.read(iprot)
    + self.success.append(_elem437)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11367,8 +12681,8 @@ class get_partitions_by_filter_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter424 in self.success:
    - iter424.write(oprot)
    + for iter438 in self.success:
    + iter438.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -11439,10 +12753,10 @@ class get_partitions_by_names_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.names = []
    - (_etype428, _size425) = iprot.readListBegin()
    - for _i429 in xrange(_size425):
    - _elem430 = iprot.readString();
    - self.names.append(_elem430)
    + (_etype442, _size439) = iprot.readListBegin()
    + for _i443 in xrange(_size439):
    + _elem444 = iprot.readString();
    + self.names.append(_elem444)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11467,8 +12781,8 @@ class get_partitions_by_names_args:
          if self.names is not None:
            oprot.writeFieldBegin('names', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.names))
    - for iter431 in self.names:
    - oprot.writeString(iter431)
    + for iter445 in self.names:
    + oprot.writeString(iter445)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -11520,11 +12834,11 @@ class get_partitions_by_names_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype435, _size432) = iprot.readListBegin()
    - for _i436 in xrange(_size432):
    - _elem437 = Partition()
    - _elem437.read(iprot)
    - self.success.append(_elem437)
    + (_etype449, _size446) = iprot.readListBegin()
    + for _i450 in xrange(_size446):
    + _elem451 = Partition()
    + _elem451.read(iprot)
    + self.success.append(_elem451)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11553,8 +12867,8 @@ class get_partitions_by_names_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
    - for iter438 in self.success:
    - iter438.write(oprot)
    + for iter452 in self.success:
    + iter452.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -11784,11 +13098,11 @@ class alter_partitions_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.new_parts = []
    - (_etype442, _size439) = iprot.readListBegin()
    - for _i443 in xrange(_size439):
    - _elem444 = Partition()
    - _elem444.read(iprot)
    - self.new_parts.append(_elem444)
    + (_etype456, _size453) = iprot.readListBegin()
    + for _i457 in xrange(_size453):
    + _elem458 = Partition()
    + _elem458.read(iprot)
    + self.new_parts.append(_elem458)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -11813,8 +13127,8 @@ class alter_partitions_args:
          if self.new_parts is not None:
            oprot.writeFieldBegin('new_parts', TType.LIST, 3)
            oprot.writeListBegin(TType.STRUCT, len(self.new_parts))
    - for iter445 in self.new_parts:
    - iter445.write(oprot)
    + for iter459 in self.new_parts:
    + iter459.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          oprot.writeFieldStop()
    @@ -12126,10 +13440,10 @@ class rename_partition_args:
            elif fid == 3:
              if ftype == TType.LIST:
                self.part_vals = []
    - (_etype449, _size446) = iprot.readListBegin()
    - for _i450 in xrange(_size446):
    - _elem451 = iprot.readString();
    - self.part_vals.append(_elem451)
    + (_etype463, _size460) = iprot.readListBegin()
    + for _i464 in xrange(_size460):
    + _elem465 = iprot.readString();
    + self.part_vals.append(_elem465)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -12160,8 +13474,8 @@ class rename_partition_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.LIST, 3)
            oprot.writeListBegin(TType.STRING, len(self.part_vals))
    - for iter452 in self.part_vals:
    - oprot.writeString(iter452)
    + for iter466 in self.part_vals:
    + oprot.writeString(iter466)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.new_part is not None:
    @@ -12492,10 +13806,10 @@ class partition_name_to_vals_result:
            if fid == 0:
              if ftype == TType.LIST:
                self.success = []
    - (_etype456, _size453) = iprot.readListBegin()
    - for _i457 in xrange(_size453):
    - _elem458 = iprot.readString();
    - self.success.append(_elem458)
    + (_etype470, _size467) = iprot.readListBegin()
    + for _i471 in xrange(_size467):
    + _elem472 = iprot.readString();
    + self.success.append(_elem472)
                iprot.readListEnd()
              else:
                iprot.skip(ftype)
    @@ -12518,8 +13832,8 @@ class partition_name_to_vals_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
    - for iter459 in self.success:
    - oprot.writeString(iter459)
    + for iter473 in self.success:
    + oprot.writeString(iter473)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -12632,11 +13946,11 @@ class partition_name_to_spec_result:
            if fid == 0:
              if ftype == TType.MAP:
                self.success = {}
    - (_ktype461, _vtype462, _size460 ) = iprot.readMapBegin()
    - for _i464 in xrange(_size460):
    - _key465 = iprot.readString();
    - _val466 = iprot.readString();
    - self.success[_key465] = _val466
    + (_ktype475, _vtype476, _size474 ) = iprot.readMapBegin()
    + for _i478 in xrange(_size474):
    + _key479 = iprot.readString();
    + _val480 = iprot.readString();
    + self.success[_key479] = _val480
                iprot.readMapEnd()
              else:
                iprot.skip(ftype)
    @@ -12659,9 +13973,9 @@ class partition_name_to_spec_result:
          if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.success))
    - for kiter467,viter468 in self.success.items():
    - oprot.writeString(kiter467)
    - oprot.writeString(viter468)
    + for kiter481,viter482 in self.success.items():
    + oprot.writeString(kiter481)
    + oprot.writeString(viter482)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
          if self.o1 is not None:
    @@ -12731,11 +14045,11 @@ class markPartitionForEvent_args:
            elif fid == 3:
              if ftype == TType.MAP:
                self.part_vals = {}
    - (_ktype470, _vtype471, _size469 ) = iprot.readMapBegin()
    - for _i473 in xrange(_size469):
    - _key474 = iprot.readString();
    - _val475 = iprot.readString();
    - self.part_vals[_key474] = _val475
    + (_ktype484, _vtype485, _size483 ) = iprot.readMapBegin()
    + for _i487 in xrange(_size483):
    + _key488 = iprot.readString();
    + _val489 = iprot.readString();
    + self.part_vals[_key488] = _val489
                iprot.readMapEnd()
              else:
                iprot.skip(ftype)
    @@ -12765,9 +14079,9 @@ class markPartitionForEvent_args:
          if self.part_vals is not None:
            oprot.writeFieldBegin('part_vals', TType.MAP, 3)
            oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.part_vals))

    [... 254 lines stripped ...]
    Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
    +++ hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Tue Feb 12 18:52:55 2013
    @@ -267,6 +267,22 @@ module ThriftHiveMetastore
            return
          end

    + def drop_table_with_environment_context(dbname, name, deleteData, environment_context)
    + send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
    + recv_drop_table_with_environment_context()
    + end
    +
    + def send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
    + send_message('drop_table_with_environment_context', Drop_table_with_environment_context_args, :dbname => dbname, :name => name, :deleteData => deleteData, :environment_context => environment_context)
    + end
    +
    + def recv_drop_table_with_environment_context()
    + result = receive_message(Drop_table_with_environment_context_result)
    + raise result.o1 unless result.o1.nil?
    + raise result.o3 unless result.o3.nil?
    + return
    + end
    +
          def get_tables(db_name, pattern)
            send_get_tables(db_name, pattern)
            return recv_get_tables()
    @@ -456,6 +472,24 @@ module ThriftHiveMetastore
            raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition failed: unknown result')
          end

    + def append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    + send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    + return recv_append_partition_with_environment_context()
    + end
    +
    + def send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
    + send_message('append_partition_with_environment_context', Append_partition_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_vals => part_vals, :environment_context => environment_context)
    + end
    +
    + def recv_append_partition_with_environment_context()
    + result = receive_message(Append_partition_with_environment_context_result)
    + return result.success unless result.success.nil?
    + raise result.o1 unless result.o1.nil?
    + raise result.o2 unless result.o2.nil?
    + raise result.o3 unless result.o3.nil?
    + raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_with_environment_context failed: unknown result')
    + end
    +
          def append_partition_by_name(db_name, tbl_name, part_name)
            send_append_partition_by_name(db_name, tbl_name, part_name)
            return recv_append_partition_by_name()
    @@ -474,6 +508,24 @@ module ThriftHiveMetastore
            raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_by_name failed: unknown result')
          end

    + def append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
    + send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
    + return recv_append_partition_by_name_with_environment_context()
    + end
    +
    + def send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
    + send_message('append_partition_by_name_with_environment_context', Append_partition_by_name_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_name => part_name, :environment_context => environment_context)
    + end
    +
    + def recv_append_partition_by_name_with_environment_context()
    + result = receive_message(Append_partition_by_name_with_environment_context_result)
    + return result.success unless result.success.nil?
    + raise result.o1 unless result.o1.nil?
    + raise result.o2 unless result.o2.nil?
    + raise result.o3 unless result.o3.nil?
    + raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_by_name_with_environment_context failed: unknown result')
    + end
    +
          def drop_partition(db_name, tbl_name, part_vals, deleteData)
            send_drop_partition(db_name, tbl_name, part_vals, deleteData)
            return recv_drop_partition()
    @@ -491,6 +543,23 @@ module ThriftHiveMetastore
            raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition failed: unknown result')
          end

    + def drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
    + send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
    + return recv_drop_partition_with_environment_context()
    + end
    +
    + def send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
    + send_message('drop_partition_with_environment_context', Drop_partition_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_vals => part_vals, :deleteData => deleteData, :environment_context => environment_context)
    + end
    +
    + def recv_drop_partition_with_environment_context()
    + result = receive_message(Drop_partition_with_environment_context_result)
    + return result.success unless result.success.nil?
    + raise result.o1 unless result.o1.nil?
    + raise result.o2 unless result.o2.nil?
    + raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_with_environment_context failed: unknown result')
    + end
    +
          def drop_partition_by_name(db_name, tbl_name, part_name, deleteData)
            send_drop_partition_by_name(db_name, tbl_name, part_name, deleteData)
            return recv_drop_partition_by_name()
    @@ -508,6 +577,23 @@ module ThriftHiveMetastore
            raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_by_name failed: unknown result')
          end

    + def drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
    + send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
    + return recv_drop_partition_by_name_with_environment_context()
    + end
    +
    + def send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
    + send_message('drop_partition_by_name_with_environment_context', Drop_partition_by_name_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_name => part_name, :deleteData => deleteData, :environment_context => environment_context)
    + end
    +
    + def recv_drop_partition_by_name_with_environment_context()
    + result = receive_message(Drop_partition_by_name_with_environment_context_result)
    + return result.success unless result.success.nil?
    + raise result.o1 unless result.o1.nil?
    + raise result.o2 unless result.o2.nil?
    + raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_by_name_with_environment_context failed: unknown result')
    + end
    +
          def get_partition(db_name, tbl_name, part_vals)
            send_get_partition(db_name, tbl_name, part_vals)
            return recv_get_partition()
    @@ -1497,6 +1583,19 @@ module ThriftHiveMetastore
            write_result(result, oprot, 'drop_table', seqid)
          end

    + def process_drop_table_with_environment_context(seqid, iprot, oprot)
    + args = read_args(iprot, Drop_table_with_environment_context_args)
    + result = Drop_table_with_environment_context_result.new()
    + begin
    + @handler.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context)
    + rescue ::NoSuchObjectException => o1
    + result.o1 = o1
    + rescue ::MetaException => o3
    + result.o3 = o3
    + end
    + write_result(result, oprot, 'drop_table_with_environment_context', seqid)
    + end
    +
          def process_get_tables(seqid, iprot, oprot)
            args = read_args(iprot, Get_tables_args)
            result = Get_tables_result.new()
    @@ -1648,6 +1747,21 @@ module ThriftHiveMetastore
            write_result(result, oprot, 'append_partition', seqid)
          end

    + def process_append_partition_with_environment_context(seqid, iprot, oprot)
    + args = read_args(iprot, Append_partition_with_environment_context_args)
    + result = Append_partition_with_environment_context_result.new()
    + begin
    + result.success = @handler.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context)
    + rescue ::InvalidObjectException => o1
    + result.o1 = o1
    + rescue ::AlreadyExistsException => o2
    + result.o2 = o2
    + rescue ::MetaException => o3
    + result.o3 = o3
    + end
    + write_result(result, oprot, 'append_partition_with_environment_context', seqid)
    + end
    +
          def process_append_partition_by_name(seqid, iprot, oprot)
            args = read_args(iprot, Append_partition_by_name_args)
            result = Append_partition_by_name_result.new()
    @@ -1663,6 +1777,21 @@ module ThriftHiveMetastore
            write_result(result, oprot, 'append_partition_by_name', seqid)
          end

    + def process_append_partition_by_name_with_environment_context(seqid, iprot, oprot)
    + args = read_args(iprot, Append_partition_by_name_with_environment_context_args)
    + result = Append_partition_by_name_with_environment_context_result.new()
    + begin
    + result.success = @handler.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context)
    + rescue ::InvalidObjectException => o1
    + result.o1 = o1
    + rescue ::AlreadyExistsException => o2
    + result.o2 = o2
    + rescue ::MetaException => o3
    + result.o3 = o3
    + end
    + write_result(result, oprot, 'append_partition_by_name_with_environment_context', seqid)
    + end
    +
          def process_drop_partition(seqid, iprot, oprot)
            args = read_args(iprot, Drop_partition_args)
            result = Drop_partition_result.new()
    @@ -1676,6 +1805,19 @@ module ThriftHiveMetastore
            write_result(result, oprot, 'drop_partition', seqid)
          end

    + def process_drop_partition_with_environment_context(seqid, iprot, oprot)
    + args = read_args(iprot, Drop_partition_with_environment_context_args)
    + result = Drop_partition_with_environment_context_result.new()
    + begin
    + result.success = @handler.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context)
    + rescue ::NoSuchObjectException => o1
    + result.o1 = o1
    + rescue ::MetaException => o2
    + result.o2 = o2
    + end
    + write_result(result, oprot, 'drop_partition_with_environment_context', seqid)
    + end
    +
          def process_drop_partition_by_name(seqid, iprot, oprot)
            args = read_args(iprot, Drop_partition_by_name_args)
            result = Drop_partition_by_name_result.new()
    @@ -1689,6 +1831,19 @@ module ThriftHiveMetastore
            write_result(result, oprot, 'drop_partition_by_name', seqid)
          end

    + def process_drop_partition_by_name_with_environment_context(seqid, iprot, oprot)
    + args = read_args(iprot, Drop_partition_by_name_with_environment_context_args)
    + result = Drop_partition_by_name_with_environment_context_result.new()
    + begin
    + result.success = @handler.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context)
    + rescue ::NoSuchObjectException => o1
    + result.o1 = o1
    + rescue ::MetaException => o2
    + result.o2 = o2
    + end
    + write_result(result, oprot, 'drop_partition_by_name_with_environment_context', seqid)
    + end
    +
          def process_get_partition(seqid, iprot, oprot)
            args = read_args(iprot, Get_partition_args)
            result = Get_partition_result.new()
    @@ -2850,6 +3005,46 @@ module ThriftHiveMetastore
          ::Thrift::Struct.generate_accessors self
        end

    + class Drop_table_with_environment_context_args
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DBNAME = 1
    + NAME = 2
    + DELETEDATA = 3
    + ENVIRONMENT_CONTEXT = 4
    +
    + FIELDS = {
    + DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbname'},
    + NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
    + DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
    + ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
    + class Drop_table_with_environment_context_result
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + O1 = 1
    + O3 = 2
    +
    + FIELDS = {
    + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
    + O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
        class Get_tables_args
          include ::Thrift::Struct, ::Thrift::Struct_Union
          DB_NAME = 1
    @@ -3276,6 +3471,50 @@ module ThriftHiveMetastore
          ::Thrift::Struct.generate_accessors self
        end

    + class Append_partition_with_environment_context_args
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DB_NAME = 1
    + TBL_NAME = 2
    + PART_VALS = 3
    + ENVIRONMENT_CONTEXT = 4
    +
    + FIELDS = {
    + DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
    + TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
    + PART_VALS => {:type => ::Thrift::Types::LIST, :name => 'part_vals', :element => {:type => ::Thrift::Types::STRING}},
    + ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
    + class Append_partition_with_environment_context_result
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + SUCCESS = 0
    + O1 = 1
    + O2 = 2
    + O3 = 3
    +
    + FIELDS = {
    + SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::Partition},
    + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::InvalidObjectException},
    + O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::AlreadyExistsException},
    + O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
        class Append_partition_by_name_args
          include ::Thrift::Struct, ::Thrift::Struct_Union
          DB_NAME = 1
    @@ -3318,6 +3557,50 @@ module ThriftHiveMetastore
          ::Thrift::Struct.generate_accessors self
        end

    + class Append_partition_by_name_with_environment_context_args
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DB_NAME = 1
    + TBL_NAME = 2
    + PART_NAME = 3
    + ENVIRONMENT_CONTEXT = 4
    +
    + FIELDS = {
    + DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
    + TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
    + PART_NAME => {:type => ::Thrift::Types::STRING, :name => 'part_name'},
    + ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
    + class Append_partition_by_name_with_environment_context_result
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + SUCCESS = 0
    + O1 = 1
    + O2 = 2
    + O3 = 3
    +
    + FIELDS = {
    + SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::Partition},
    + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::InvalidObjectException},
    + O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::AlreadyExistsException},
    + O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
        class Drop_partition_args
          include ::Thrift::Struct, ::Thrift::Struct_Union
          DB_NAME = 1
    @@ -3360,6 +3643,50 @@ module ThriftHiveMetastore
          ::Thrift::Struct.generate_accessors self
        end

    + class Drop_partition_with_environment_context_args
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DB_NAME = 1
    + TBL_NAME = 2
    + PART_VALS = 3
    + DELETEDATA = 4
    + ENVIRONMENT_CONTEXT = 5
    +
    + FIELDS = {
    + DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
    + TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
    + PART_VALS => {:type => ::Thrift::Types::LIST, :name => 'part_vals', :element => {:type => ::Thrift::Types::STRING}},
    + DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
    + ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
    + class Drop_partition_with_environment_context_result
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + SUCCESS = 0
    + O1 = 1
    + O2 = 2
    +
    + FIELDS = {
    + SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
    + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
    + O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
        class Drop_partition_by_name_args
          include ::Thrift::Struct, ::Thrift::Struct_Union
          DB_NAME = 1
    @@ -3402,6 +3729,50 @@ module ThriftHiveMetastore
          ::Thrift::Struct.generate_accessors self
        end

    + class Drop_partition_by_name_with_environment_context_args
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + DB_NAME = 1
    + TBL_NAME = 2
    + PART_NAME = 3
    + DELETEDATA = 4
    + ENVIRONMENT_CONTEXT = 5
    +
    + FIELDS = {
    + DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
    + TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
    + PART_NAME => {:type => ::Thrift::Types::STRING, :name => 'part_name'},
    + DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
    + ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
    + class Drop_partition_by_name_with_environment_context_result
    + include ::Thrift::Struct, ::Thrift::Struct_Union
    + SUCCESS = 0
    + O1 = 1
    + O2 = 2
    +
    + FIELDS = {
    + SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
    + O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
    + O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
    + }
    +
    + def struct_fields; FIELDS; end
    +
    + def validate
    + end
    +
    + ::Thrift::Struct.generate_accessors self
    + end
    +
        class Get_partition_args
          include ::Thrift::Struct, ::Thrift::Struct_Union
          DB_NAME = 1

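    The regenerated Ruby client above gains the same *_with_environment_context calls as the other bindings, each mirroring the existing method plus a trailing EnvironmentContext argument. For comparison, a minimal sketch of invoking one of the new methods through the generated Java client follows; the metastore endpoint, table name, and property key are assumptions, not part of the patch.

      import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
      import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
      import org.apache.thrift.protocol.TBinaryProtocol;
      import org.apache.thrift.transport.TSocket;
      import org.apache.thrift.transport.TTransport;

      public class DropTableWithContextExample {
        public static void main(String[] args) throws Exception {
          // Assumed metastore endpoint; adjust host/port for a real deployment.
          TTransport transport = new TSocket("localhost", 9083);
          transport.open();
          ThriftHiveMetastore.Client client =
              new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

          // EnvironmentContext carries free-form key/value properties with the call.
          EnvironmentContext envContext = new EnvironmentContext();
          envContext.putToProperties("triggeredBy", "example-client");  // illustrative key only

          // New method added by this patch: drop_table semantics plus the attached context.
          client.drop_table_with_environment_context("default", "example_tbl", true, envContext);

          transport.close();
        }
      }
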
    Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
    +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Tue Feb 12 18:52:55 2013
    @@ -1080,19 +1080,13 @@ public class HiveMetaStore extends Thrif
          @Override
          public void create_table(final Table tbl) throws AlreadyExistsException,
              MetaException, InvalidObjectException {
    - create_table(tbl, null);
    + create_table_with_environment_context(tbl, null);
          }

          @Override
    - public void create_table_with_environment_context(final Table table,
    + public void create_table_with_environment_context(final Table tbl,
              final EnvironmentContext envContext)
              throws AlreadyExistsException, MetaException, InvalidObjectException {
    - create_table(table, envContext);
    - }
    -
    - private void create_table(final Table tbl,
    - final EnvironmentContext envContext) throws AlreadyExistsException,
    - MetaException, InvalidObjectException {
            startFunction("create_table", ": " + tbl.toString());
            boolean success = false;
            Exception ex = null;
    @@ -1126,8 +1120,9 @@ public class HiveMetaStore extends Thrif
          }

          private void drop_table_core(final RawStore ms, final String dbname, final String name,
    - final boolean deleteData) throws NoSuchObjectException, MetaException, IOException,
    - InvalidObjectException, InvalidInputException {
    + final boolean deleteData, final EnvironmentContext envContext)
    + throws NoSuchObjectException, MetaException, IOException,
    + InvalidObjectException, InvalidInputException {
            boolean success = false;
            boolean isExternal = false;
            Path tblPath = null;
    @@ -1196,7 +1191,9 @@ public class HiveMetaStore extends Thrif
                // ok even if the data is not deleted
              }
              for (MetaStoreEventListener listener : listeners) {
    - listener.onDropTable(new DropTableEvent(tbl, success, this));
    + DropTableEvent dropTableEvent = new DropTableEvent(tbl, success, this);
    + dropTableEvent.setEnvironmentContext(envContext);
    + listener.onDropTable(dropTableEvent);
              }
            }
          }
    @@ -1300,14 +1297,22 @@ public class HiveMetaStore extends Thrif
            return partPaths;
          }

    + @Override
          public void drop_table(final String dbname, final String name, final boolean deleteData)
              throws NoSuchObjectException, MetaException {
    + drop_table_with_environment_context(dbname, name, deleteData, null);
    + }
    +
    + @Override
    + public void drop_table_with_environment_context(final String dbname, final String name,
    + final boolean deleteData, final EnvironmentContext envContext)
    + throws NoSuchObjectException, MetaException {
            startTableFunction("drop_table", dbname, name);

            boolean success = false;
            Exception ex = null;
            try {
    - drop_table_core(getMS(), dbname, name, deleteData);
    + drop_table_core(getMS(), dbname, name, deleteData, envContext);
              success = true;
            } catch (IOException e) {
              ex = e;
    @@ -1464,8 +1469,8 @@ public class HiveMetaStore extends Thrif
          }

          private Partition append_partition_common(RawStore ms, String dbName, String tableName,
    - List<String> part_vals) throws InvalidObjectException, AlreadyExistsException,
    - MetaException {
    + List<String> part_vals, EnvironmentContext envContext) throws InvalidObjectException,
    + AlreadyExistsException, MetaException {

            Partition part = new Partition();
            boolean success = false, madeDir = false;
    @@ -1535,6 +1540,7 @@ public class HiveMetaStore extends Thrif
              for (MetaStoreEventListener listener : listeners) {
                AddPartitionEvent addPartitionEvent =
                    new AddPartitionEvent(tbl, part, success, this);
    + addPartitionEvent.setEnvironmentContext(envContext);
                listener.onAddPartition(addPartitionEvent);
              }
            }
    @@ -1553,9 +1559,17 @@ public class HiveMetaStore extends Thrif
            }
          }

    + @Override
          public Partition append_partition(final String dbName, final String tableName,
              final List<String> part_vals) throws InvalidObjectException,
              AlreadyExistsException, MetaException {
    + return append_partition_with_environment_context(dbName, tableName, part_vals, null);
    + }
    +
    + @Override
    + public Partition append_partition_with_environment_context(final String dbName,
    + final String tableName, final List<String> part_vals, final EnvironmentContext envContext)
    + throws InvalidObjectException, AlreadyExistsException, MetaException {
            startPartitionFunction("append_partition", dbName, tableName, part_vals);
            if (LOG.isDebugEnabled()) {
              for (String part : part_vals) {
    @@ -1566,7 +1580,7 @@ public class HiveMetaStore extends Thrif
            Partition ret = null;
            Exception ex = null;
            try {
    - ret = append_partition_common(getMS(), dbName, tableName, part_vals);
    + ret = append_partition_common(getMS(), dbName, tableName, part_vals, envContext);
            } catch (Exception e) {
              ex = e;
              if (e instanceof MetaException) {
    @@ -1793,7 +1807,7 @@ public class HiveMetaStore extends Thrif
          @Override
          public Partition add_partition(final Partition part)
              throws InvalidObjectException, AlreadyExistsException, MetaException {
    - return add_partition(part, null);
    + return add_partition_with_environment_context(part, null);
          }

          @Override
    @@ -1801,12 +1815,6 @@ public class HiveMetaStore extends Thrif
              final Partition part, EnvironmentContext envContext)
              throws InvalidObjectException, AlreadyExistsException,
              MetaException {
    - return add_partition(part, envContext);
    - }
    -
    - private Partition add_partition(final Partition part,
    - final EnvironmentContext envContext) throws InvalidObjectException,
    - AlreadyExistsException, MetaException {
            startTableFunction("add_partition",
                part.getDbName(), part.getTableName());
            Partition ret = null;
    @@ -1833,7 +1841,7 @@ public class HiveMetaStore extends Thrif
          }

          private boolean drop_partition_common(RawStore ms, String db_name, String tbl_name,
    - List<String> part_vals, final boolean deleteData)
    + List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext)
            throws MetaException, NoSuchObjectException, IOException, InvalidObjectException,
            InvalidInputException {
            boolean success = false;
    @@ -1894,22 +1902,34 @@ public class HiveMetaStore extends Thrif
                }
              }
              for (MetaStoreEventListener listener : listeners) {
    - listener.onDropPartition(new DropPartitionEvent(tbl, part, success, this));
    + DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(tbl, part, success, this);
    + dropPartitionEvent.setEnvironmentContext(envContext);
    + listener.onDropPartition(dropPartitionEvent);
              }
            }
            return true;
          }

    + @Override
          public boolean drop_partition(final String db_name, final String tbl_name,
              final List<String> part_vals, final boolean deleteData)
              throws NoSuchObjectException, MetaException, TException {
    + return drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData,
    + null);
    + }
    +
    + @Override
    + public boolean drop_partition_with_environment_context(final String db_name,
    + final String tbl_name, final List<String> part_vals, final boolean deleteData,
    + final EnvironmentContext envContext)
    + throws NoSuchObjectException, MetaException, TException {
            startPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
            LOG.info("Partition values:" + part_vals);

            boolean ret = false;
            Exception ex = null;
            try {
    - ret = drop_partition_common(getMS(), db_name, tbl_name, part_vals, deleteData);
    + ret = drop_partition_common(getMS(), db_name, tbl_name, part_vals, deleteData, envContext);
            } catch (IOException e) {
              ex = e;
              throw new MetaException(e.getMessage());
    @@ -2274,7 +2294,7 @@ public class HiveMetaStore extends Thrif
              final Table newTable)
              throws InvalidOperationException, MetaException {
            // Do not set an environment context.
    - alter_table(dbname, name, newTable, null);
    + alter_table_with_environment_context(dbname, name, newTable, null);
          }

          @Override
    @@ -2282,12 +2302,6 @@ public class HiveMetaStore extends Thrif
              final String name, final Table newTable,
              final EnvironmentContext envContext)
              throws InvalidOperationException, MetaException {
    - alter_table(dbname, name, newTable, envContext);
    - }
    -
    - private void alter_table(final String dbname, final String name,
    - final Table newTable, final EnvironmentContext envContext)
    - throws InvalidOperationException, MetaException {
            startFunction("alter_table", ": db=" + dbname + " tbl=" + name
                + " newtbl=" + newTable.getTableName());

    @@ -2609,9 +2623,17 @@ public class HiveMetaStore extends Thrif
            return ret;
          }

    + @Override
          public Partition append_partition_by_name(final String db_name, final String tbl_name,
              final String part_name) throws InvalidObjectException,
              AlreadyExistsException, MetaException, TException {
    + return append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, null);
    + }
    +
    + @Override
    + public Partition append_partition_by_name_with_environment_context(final String db_name,
    + final String tbl_name, final String part_name, final EnvironmentContext env_context)
    + throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
            startFunction("append_partition_by_name", ": db=" + db_name + " tbl="
                + tbl_name + " part=" + part_name);

    @@ -2620,7 +2642,7 @@ public class HiveMetaStore extends Thrif
            try {
              RawStore ms = getMS();
              List<String> partVals = getPartValsFromName(ms, db_name, tbl_name, part_name);
    - ret = append_partition_common(ms, db_name, tbl_name, partVals);
    + ret = append_partition_common(ms, db_name, tbl_name, partVals, env_context);
            } catch (Exception e) {
              ex = e;
              if (e instanceof InvalidObjectException) {
    @@ -2642,10 +2664,10 @@ public class HiveMetaStore extends Thrif
            return ret;
          }

    - private boolean drop_partition_by_name_core(final RawStore ms,
    - final String db_name, final String tbl_name, final String part_name,
    - final boolean deleteData) throws NoSuchObjectException,
    - MetaException, TException, IOException, InvalidObjectException, InvalidInputException {
    + private boolean drop_partition_by_name_core(final RawStore ms, final String db_name,
    + final String tbl_name, final String part_name, final boolean deleteData,
    + final EnvironmentContext envContext) throws NoSuchObjectException, MetaException,
    + TException, IOException, InvalidObjectException, InvalidInputException {

            List<String> partVals = null;
            try {
    @@ -2654,13 +2676,22 @@ public class HiveMetaStore extends Thrif
              throw new NoSuchObjectException(e.getMessage());
            }

    - return drop_partition_common(ms, db_name, tbl_name, partVals, deleteData);
    + return drop_partition_common(ms, db_name, tbl_name, partVals, deleteData, envContext);
          }

          @Override
          public boolean drop_partition_by_name(final String db_name, final String tbl_name,
              final String part_name, final boolean deleteData) throws NoSuchObjectException,
              MetaException, TException {
    + return drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name,
    + deleteData, null);
    + }
    +
    + @Override
    + public boolean drop_partition_by_name_with_environment_context(final String db_name,
    + final String tbl_name, final String part_name, final boolean deleteData,
    + final EnvironmentContext envContext) throws NoSuchObjectException,
    + MetaException, TException {
            startFunction("drop_partition_by_name", ": db=" + db_name + " tbl="
                + tbl_name + " part=" + part_name);

    @@ -2668,7 +2699,7 @@ public class HiveMetaStore extends Thrif
            Exception ex = null;
            try {
              ret = drop_partition_by_name_core(getMS(), db_name, tbl_name,
    - part_name, deleteData);
    + part_name, deleteData, envContext);
            } catch (IOException e) {
              ex = e;
              throw new MetaException(e.getMessage());

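    In the handler changes above, the EnvironmentContext is threaded through drop_table_core, append_partition_common, drop_partition_common, and drop_partition_by_name_core, and attached to the corresponding listener events via setEnvironmentContext before the listeners fire. Because the original context-free entry points delegate with a null context, any listener code that reads the context back (through the matching getter on the event) has to tolerate null. A small null-safe helper, sketched under that assumption:

      import java.util.Collections;
      import java.util.Map;

      import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

      // Sketch only: unpacks the properties of an EnvironmentContext taken from a
      // metastore event, returning an empty map when no context was supplied
      // (e.g. when the caller used the original drop_table / append_partition calls).
      public final class EnvironmentContextUtil {
        private EnvironmentContextUtil() {
        }

        public static Map<String, String> propertiesOf(EnvironmentContext ctx) {
          if (ctx == null || ctx.getProperties() == null) {
            return Collections.emptyMap();
          }
          return ctx.getProperties();
        }
      }
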
    Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1445309&r1=1445308&r2=1445309&view=diff
    ==============================================================================
    --- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
    +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Tue Feb 12 18:52:55 2013
    @@ -45,6 +45,7 @@ import org.apache.hadoop.hive.metastore.
      import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
      import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
      import org.apache.hadoop.hive.metastore.api.Database;
    +import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
      import org.apache.hadoop.hive.metastore.api.FieldSchema;
      import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
      import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
    @@ -206,7 +207,12 @@ public class HiveMetaStoreClient impleme
         */
        public void alter_table(String dbname, String tbl_name, Table new_tbl)
            throws InvalidOperationException, MetaException, TException {
    - client.alter_table(dbname, tbl_name, new_tbl);
    + alter_table(dbname, tbl_name, new_tbl, null);
    + }
    +
    + public void alter_table(String dbname, String tbl_name, Table new_tbl,
    + EnvironmentContext envContext) throws InvalidOperationException, MetaException, TException {
    + client.alter_table_with_environment_context(dbname, tbl_name, new_tbl, envContext);
        }

        /**
    @@ -358,7 +364,13 @@ public class HiveMetaStoreClient impleme
        public Partition add_partition(Partition new_part)
            throws InvalidObjectException, AlreadyExistsException, MetaException,
            TException {
    - return deepCopy(client.add_partition(new_part));
    + return add_partition(new_part, null);
    + }
    +
    + public Partition add_partition(Partition new_part, EnvironmentContext envContext)
    + throws InvalidObjectException, AlreadyExistsException, MetaException,
    + TException {
    + return deepCopy(client.add_partition_with_environment_context(new_part, envContext));
        }

        /**
    @@ -390,14 +402,26 @@ public class HiveMetaStoreClient impleme
        public Partition appendPartition(String db_name, String table_name,
            List<String> part_vals) throws InvalidObjectException,
            AlreadyExistsException, MetaException, TException {
    - return deepCopy(client.append_partition(db_name, table_name, part_vals));
    + return appendPartition(db_name, table_name, part_vals, null);
    + }
    +
    + public Partition appendPartition(String db_name, String table_name, List<String> part_vals,
    + EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
    + MetaException, TException {
    + return deepCopy(client.append_partition_with_environment_context(db_name, table_name,
    + part_vals, envContext));
        }

        public Partition appendPartition(String dbName, String tableName, String partName)
    - throws InvalidObjectException, AlreadyExistsException,
    - MetaException, TException {
    - return deepCopy(
    - client.append_partition_by_name(dbName, tableName, partName));
    + throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
    + return appendPartition(dbName, tableName, partName, null);
    + }
    +
    + public Partition appendPartition(String dbName, String tableName, String partName,
    + EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
    + MetaException, TException {
    + return deepCopy(client.append_partition_by_name_with_environment_context(dbName, tableName,
    + partName, envContext));
        }

        /**
    @@ -423,13 +447,18 @@ public class HiveMetaStoreClient impleme
         */
        public void createTable(Table tbl) throws AlreadyExistsException,
            InvalidObjectException, MetaException, NoSuchObjectException, TException {
    + createTable(tbl, null);
    + }
    +
    + public void createTable(Table tbl, EnvironmentContext envContext) throws AlreadyExistsException,
    + InvalidObjectException, MetaException, NoSuchObjectException, TException {
          HiveMetaHook hook = getHook(tbl);
          if (hook != null) {
            hook.preCreateTable(tbl);
          }
          boolean success = false;
          try {
    - client.create_table(tbl);
    + client.create_table_with_environment_context(tbl, envContext);
            if (hook != null) {
              hook.commitCreateTable(tbl);
            }
    @@ -512,13 +541,25 @@ public class HiveMetaStoreClient impleme
        public boolean dropPartition(String db_name, String tbl_name,
            List<String> part_vals) throws NoSuchObjectException, MetaException,
            TException {
    - return dropPartition(db_name, tbl_name, part_vals, true);
    + return dropPartition(db_name, tbl_name, part_vals, true, null);
    + }
    +
    + public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
    + EnvironmentContext env_context) throws NoSuchObjectException, MetaException, TException {
    + return dropPartition(db_name, tbl_name, part_vals, true, env_context);
        }

        public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData)
            throws NoSuchObjectException, MetaException, TException {
    - return client.drop_partition_by_name(dbName, tableName, partName, deleteData);
    + return dropPartition(dbName, tableName, partName, deleteData, null);
    + }
    +
    + public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData,
    + EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException {
    + return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName,
    + deleteData, envContext);
        }
    +
        /**
         * @param db_name
         * @param tbl_name
    @@ -535,7 +576,14 @@ public class HiveMetaStoreClient impleme
        public boolean dropPartition(String db_name, String tbl_name,
            List<String> part_vals, boolean deleteData) throws NoSuchObjectException,
            MetaException, TException {
    - return client.drop_partition(db_name, tbl_name, part_vals, deleteData);
    + return dropPartition(db_name, tbl_name, part_vals, deleteData, null);
    + }
    +
    + public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
    + boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
    + MetaException, TException {
    + return client.drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData,
    + envContext);
        }

        /**
    @@ -550,14 +598,14 @@ public class HiveMetaStoreClient impleme
         */
        public void dropTable(String dbname, String name)
            throws NoSuchObjectException, MetaException, TException {
    - dropTable(dbname, name, true, true);
    + dropTable(dbname, name, true, true, null);
        }

        /** {@inheritDoc} */
        @Deprecated
        public void dropTable(String tableName, boolean deleteData)
            throws MetaException, UnknownTableException, TException, NoSuchObjectException {
    - dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false);
    + dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false, null);
        }

        /**
    @@ -573,14 +621,19 @@ public class HiveMetaStoreClient impleme
         * java.lang.String, boolean)
         */
        public void dropTable(String dbname, String name, boolean deleteData,
    - boolean ignoreUknownTab) throws MetaException, TException,
    + boolean ignoreUnknownTab) throws MetaException, TException,
            NoSuchObjectException, UnsupportedOperationException {
    + dropTable(dbname, name, deleteData, ignoreUnknownTab, null);
    + }

    + public void dropTable(String dbname, String name, boolean deleteData,
    + boolean ignoreUnknownTab, EnvironmentContext envContext) throws MetaException, TException,
    + NoSuchObjectException, UnsupportedOperationException {
          Table tbl;
          try {
            tbl = getTable(dbname, name);
          } catch (NoSuchObjectException e) {
    - if (!ignoreUknownTab) {
    + if (!ignoreUnknownTab) {
              throw e;
            }
            return;
    @@ -594,13 +647,13 @@ public class HiveMetaStoreClient impleme
          }
          boolean success = false;
          try {
    - client.drop_table(dbname, name, deleteData);
    + client.drop_table_with_environment_context(dbname, name, deleteData, envContext);
            if (hook != null) {
              hook.commitDropTable(tbl, deleteData);
            }
            success=true;
          } catch (NoSuchObjectException e) {
    - if (!ignoreUknownTab) {
    + if (!ignoreUnknownTab) {
              throw e;
            }
          } finally {
    @@ -1038,13 +1091,26 @@ public class HiveMetaStoreClient impleme

        public Partition appendPartitionByName(String dbName, String tableName, String partName)
            throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
    - return deepCopy(
    - client.append_partition_by_name(dbName, tableName, partName));
    + return appendPartitionByName(dbName, tableName, partName, null);
        }

    - public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData)
    - throws NoSuchObjectException, MetaException, TException {
    - return client.drop_partition_by_name(dbName, tableName, partName, deleteData);
    + public Partition appendPartitionByName(String dbName, String tableName, String partName,
    + EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
    + MetaException, TException {
    + return deepCopy(client.append_partition_by_name_with_environment_context(dbName, tableName,
    + partName, envContext));
    + }
    +
    + public boolean dropPartitionByName(String dbName, String tableName, String partName,
    + boolean deleteData) throws NoSuchObjectException, MetaException, TException {
    + return dropPartitionByName(dbName, tableName, partName, deleteData, null);
    + }
    +
    + public boolean dropPartitionByName(String dbName, String tableName, String partName,
    + boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
    + MetaException, TException {
    + return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName,
    + deleteData, envContext);
        }

        private HiveMetaHook getHook(Table tbl) throws MetaException {
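
For reference, a minimal sketch of how a caller might exercise the new client overloads above. It is not part of the patch; the class name, the database/table names, and the hadoop.job.ugi property are illustrative, mirroring the test added below.

  import java.util.Arrays;
  import java.util.HashMap;
  import java.util.Map;

  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
  import org.apache.hadoop.hive.metastore.api.EnvironmentContext;

  public class EnvContextClientSketch {
    public static void main(String[] args) throws Exception {
      // Illustrative names; assumes tmpdb.tmptbl already exists with a partition b=2011.
      String dbName = "tmpdb";
      String tblName = "tmptbl";

      HiveConf conf = new HiveConf(EnvContextClientSketch.class);
      HiveMetaStoreClient msc = new HiveMetaStoreClient(conf, null);

      // Properties placed in the EnvironmentContext travel with the Thrift call
      // and surface on the corresponding metastore listener events.
      Map<String, String> props = new HashMap<String, String>();
      props.put("hadoop.job.ugi", "test_user");
      EnvironmentContext envContext = new EnvironmentContext(props);

      // The trailing EnvironmentContext argument is forwarded to the
      // *_with_environment_context Thrift methods introduced by this patch.
      msc.dropPartition(dbName, tblName, Arrays.asList("2011"), true, envContext);
      msc.dropTable(dbName, tblName, true, false, envContext);

      msc.close();
    }
  }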

    Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
    URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java?rev=1445309&view=auto
    ==============================================================================
    --- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java (added)
    +++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java Tue Feb 12 18:52:55 2013
    @@ -0,0 +1,222 @@
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements. See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership. The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.hadoop.hive.metastore;
    +
    +import java.util.ArrayList;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +
    +import junit.framework.TestCase;
    +
    +import org.apache.hadoop.hive.cli.CliSessionState;
    +import org.apache.hadoop.hive.conf.HiveConf;
    +import org.apache.hadoop.hive.metastore.api.Database;
    +import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
    +import org.apache.hadoop.hive.metastore.api.FieldSchema;
    +import org.apache.hadoop.hive.metastore.api.Partition;
    +import org.apache.hadoop.hive.metastore.api.SerDeInfo;
    +import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
    +import org.apache.hadoop.hive.metastore.api.Table;
    +import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
    +import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
    +import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
    +import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
    +import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
    +import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
    +import org.apache.hadoop.hive.metastore.events.DropTableEvent;
    +import org.apache.hadoop.hive.metastore.events.ListenerEvent;
    +import org.apache.hadoop.hive.ql.session.SessionState;
    +import org.apache.hadoop.hive.serde.serdeConstants;
    +import org.apache.hadoop.hive.shims.ShimLoader;
    +import org.mortbay.log.Log;
    +
    +/**
    + * TestHiveMetaStoreWithEnvironmentContext. Test case for _with_environment_context
    + * calls in {@link org.apache.hadoop.hive.metastore.HiveMetaStore}
    + */
    +public class TestHiveMetaStoreWithEnvironmentContext extends TestCase {
    +
    + private HiveConf hiveConf;
    + private HiveMetaStoreClient msc;
    + private EnvironmentContext envContext;
    + private final Database db = new Database();
    + private Table table = new Table();
    + private final Partition partition = new Partition();
    +
    + private static final String dbName = "tmpdb";
    + private static final String tblName = "tmptbl";
    + private static final String renamed = "tmptbl2";
    +
    + @Override
    + protected void setUp() throws Exception {
    + super.setUp();
    +
    + System.setProperty("hive.metastore.event.listeners",
    + DummyListener.class.getName());
    +
    + int port = MetaStoreUtils.findFreePort();
    + MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
    +
    + hiveConf = new HiveConf(this.getClass());
    + hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
    + hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    + hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    + hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    + hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
    + SessionState.start(new CliSessionState(hiveConf));
    + msc = new HiveMetaStoreClient(hiveConf, null);
    +
    + msc.dropDatabase(dbName, true, true);
    +
    + Map<String, String> envProperties = new HashMap<String, String>();
    + envProperties.put("hadoop.job.ugi", "test_user");
    + envContext = new EnvironmentContext(envProperties);
    +
    + db.setName(dbName);
    +
    + Map<String, String> tableParams = new HashMap<String, String>();
    + tableParams.put("a", "string");
    + List<FieldSchema> partitionKeys = new ArrayList<FieldSchema>();
    + partitionKeys.add(new FieldSchema("b", "string", ""));
    +
    + List<FieldSchema> cols = new ArrayList<FieldSchema>();
    + cols.add(new FieldSchema("a", "string", ""));
    + cols.add(new FieldSchema("b", "string", ""));
    + StorageDescriptor sd = new StorageDescriptor();
    + sd.setCols(cols);
    + sd.setCompressed(false);
    + sd.setParameters(tableParams);
    + sd.setSerdeInfo(new SerDeInfo());
    + sd.getSerdeInfo().setName(tblName);
    + sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    + sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    +
    + table.setDbName(dbName);
    + table.setTableName(tblName);
    + table.setParameters(tableParams);
    + table.setPartitionKeys(partitionKeys);
    + table.setSd(sd);
    +
    + List<String> partValues = new ArrayList<String>();
    + partValues.add("2011");
    + partition.setDbName(dbName);
    + partition.setTableName(tblName);
    + partition.setValues(partValues);
    + partition.setSd(table.getSd().deepCopy());
    + partition.getSd().setSerdeInfo(table.getSd().getSerdeInfo().deepCopy());
    +
    + DummyListener.notifyList.clear();
    + }
    +
    + @Override
    + protected void tearDown() throws Exception {
    + super.tearDown();
    + }
    +
    + public void testEnvironmentContext() throws Exception {
    + int listSize = 0;
    +
    + List<ListenerEvent> notifyList = DummyListener.notifyList;
    + assertEquals(notifyList.size(), listSize);
    + msc.createDatabase(db);
    + listSize++;
    + assertEquals(listSize, notifyList.size());
    + CreateDatabaseEvent dbEvent = (CreateDatabaseEvent)(notifyList.get(listSize - 1));
    + assert dbEvent.getStatus();
    +
    + Log.debug("Creating table");
    + msc.createTable(table, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + CreateTableEvent tblEvent = (CreateTableEvent)(notifyList.get(listSize - 1));
    + assert tblEvent.getStatus();
    + assertEquals(envContext, tblEvent.getEnvironmentContext());
    +
    + table = msc.getTable(dbName, tblName);
    +
    + Log.debug("Adding partition");
    + partition.getSd().setLocation(table.getSd().getLocation() + "/part1");
    + msc.add_partition(partition, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + AddPartitionEvent partEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
    + assert partEvent.getStatus();
    + assertEquals(envContext, partEvent.getEnvironmentContext());
    +
    + Log.debug("Appending partition");
    + List<String> partVals = new ArrayList<String>();
    + partVals.add("2012");
    + msc.appendPartition(dbName, tblName, partVals, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + AddPartitionEvent appendPartEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
    + assert appendPartEvent.getStatus();
    + assertEquals(envContext, appendPartEvent.getEnvironmentContext());
    +
    + Log.debug("Renaming table");
    + table.setTableName(renamed);
    + msc.alter_table(dbName, tblName, table, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + AlterTableEvent alterTableEvent = (AlterTableEvent) notifyList.get(listSize-1);
    + assert alterTableEvent.getStatus();
    + assertEquals(envContext, alterTableEvent.getEnvironmentContext());
    +
    + Log.debug("Renaming table back");
    + table.setTableName(tblName);
    + msc.alter_table(dbName, renamed, table, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    +
    + Log.debug("Dropping partition");
    + List<String> dropPartVals = new ArrayList<String>();
    + dropPartVals.add("2011");
    + msc.dropPartition(dbName, tblName, dropPartVals, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + DropPartitionEvent dropPartEvent = (DropPartitionEvent)notifyList.get(listSize - 1);
    + assert dropPartEvent.getStatus();
    + assertEquals(envContext, dropPartEvent.getEnvironmentContext());
    +
    + Log.debug("Dropping partition by name");
    + msc.dropPartition(dbName, tblName, "b=2012", true, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + DropPartitionEvent dropPartByNameEvent = (DropPartitionEvent)notifyList.get(listSize - 1);
    + assert dropPartByNameEvent.getStatus();
    + assertEquals(envContext, dropPartByNameEvent.getEnvironmentContext());
    +
    + Log.debug("Dropping table");
    + msc.dropTable(dbName, tblName, true, false, envContext);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    + DropTableEvent dropTblEvent = (DropTableEvent)notifyList.get(listSize-1);
    + assert dropTblEvent.getStatus();
    + assertEquals(envContext, dropTblEvent.getEnvironmentContext());
    +
    + msc.dropDatabase(dbName);
    + listSize++;
    + assertEquals(notifyList.size(), listSize);
    +
    + DropDatabaseEvent dropDB = (DropDatabaseEvent)notifyList.get(listSize-1);
    + assert dropDB.getStatus();
    + }
    +
    +}
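
The listener half of the round trip is not shown here (DummyListener is a pre-existing test helper), so the following helper, which is purely illustrative and not part of the commit, sketches how listener code might read the context back off an event. It assumes getEnvironmentContext() is exposed on the ListenerEvent base class, as the uniform calls in the test above suggest.

  import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
  import org.apache.hadoop.hive.metastore.events.ListenerEvent;

  /**
   * Illustrative helper, not part of this patch: pulls a caller-supplied
   * property back out of the EnvironmentContext attached to a metastore
   * listener event.
   */
  public final class EnvContextEvents {

    private EnvContextEvents() {
    }

    /** Returns the named property from the event's EnvironmentContext, or null if absent. */
    public static String getContextProperty(ListenerEvent event, String key) {
      EnvironmentContext ctx = event.getEnvironmentContext();
      if (ctx == null || ctx.getProperties() == null) {
        return null;
      }
      return ctx.getProperties().get(key);
    }
  }

For the calls made in testEnvironmentContext() above, getContextProperty(dropTblEvent, "hadoop.job.ugi") would return "test_user".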
