Modified: hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java?rev=1445309&r1=1445308&r2=1445309&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java Tue Feb 12 18:52:55 2013
@@ -68,6 +68,8 @@ public class ThriftHiveMetastore {

      public void drop_table(String dbname, String name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

+ public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
      public List<String> get_tables(String db_name, String pattern) throws MetaException, org.apache.thrift.TException;

      public List<String> get_all_tables(String db_name) throws MetaException, org.apache.thrift.TException;
@@ -90,12 +92,20 @@ public class ThriftHiveMetastore {

      public Partition append_partition(String db_name, String tbl_name, List<String> part_vals) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;

+ public Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;
+
      public Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;

+ public Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException;
+
      public boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

+ public boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
      public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;

+ public boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
      public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, org.apache.thrift.TException;

      public Partition get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names) throws MetaException, NoSuchObjectException, org.apache.thrift.TException;
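The hunk above adds synchronous *_with_environment_context variants to the Iface. As a rough orientation, here is a minimal sketch of how a caller might invoke the new drop_table_with_environment_context through the generated blocking Client; the host/port, transport setup, and the property key placed in the EnvironmentContext are assumptions for illustration, not values taken from this commit.

// Sketch only: endpoint and EnvironmentContext contents are illustrative assumptions.
import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class DropTableWithContextSketch {
  public static void main(String[] args) throws Exception {
    // Assumed metastore endpoint; adjust for a real deployment.
    TTransport transport = new TSocket("localhost", 9083);
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

    // The environment context is a free-form map<string,string> that rides along
    // with the call; the key below is made up for the example.
    EnvironmentContext ctx = new EnvironmentContext();
    ctx.putToProperties("example.origin", "cleanup-job");

    // New variant added in this revision: same semantics as drop_table,
    // with the EnvironmentContext passed through to the server.
    client.drop_table_with_environment_context("default", "tmp_events", true, ctx);

    transport.close();
  }
}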
@@ -222,6 +232,8 @@ public class ThriftHiveMetastore {

      public void drop_table(String dbname, String name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_table_call> resultHandler) throws org.apache.thrift.TException;

+ public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_table_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
+
      public void get_tables(String db_name, String pattern, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_tables_call> resultHandler) throws org.apache.thrift.TException;

      public void get_all_tables(String db_name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_all_tables_call> resultHandler) throws org.apache.thrift.TException;
@@ -244,12 +256,20 @@ public class ThriftHiveMetastore {

      public void append_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_call> resultHandler) throws org.apache.thrift.TException;

+ public void append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
+
      public void append_partition_by_name(String db_name, String tbl_name, String part_name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_by_name_call> resultHandler) throws org.apache.thrift.TException;

+ public void append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.append_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
+
      public void drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_call> resultHandler) throws org.apache.thrift.TException;

+ public void drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
+
      public void drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_by_name_call> resultHandler) throws org.apache.thrift.TException;

+ public void drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException;
+
      public void get_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_partition_call> resultHandler) throws org.apache.thrift.TException;

      public void get_partition_with_auth(String db_name, String tbl_name, List<String> part_vals, String user_name, List<String> group_names, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_partition_with_auth_call> resultHandler) throws org.apache.thrift.TException;
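The AsyncIface gains matching non-blocking variants. A sketch of an asynchronous drop_partition_with_environment_context call follows; the endpoint, partition values, context key, and callback handling are assumptions, and the callback shape matches the libthrift generation this code targets (onComplete/onError on AsyncMethodCallback<...>_call).

// Sketch only: endpoint, partition values, and callback bodies are illustrative assumptions.
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;

import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.async.TAsyncClientManager;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TNonblockingSocket;

public class AsyncDropPartitionSketch {
  public static void main(String[] args) throws Exception {
    TAsyncClientManager manager = new TAsyncClientManager();
    TNonblockingSocket socket = new TNonblockingSocket("localhost", 9083);
    ThriftHiveMetastore.AsyncClient client = new ThriftHiveMetastore.AsyncClient(
        new TBinaryProtocol.Factory(), manager, socket);

    EnvironmentContext ctx = new EnvironmentContext();
    ctx.putToProperties("example.origin", "async-cleanup"); // made-up key

    final CountDownLatch done = new CountDownLatch(1);
    client.drop_partition_with_environment_context(
        "default", "events", Arrays.asList("2013", "02"), true, ctx,
        new AsyncMethodCallback<ThriftHiveMetastore.AsyncClient.drop_partition_with_environment_context_call>() {
          public void onComplete(
              ThriftHiveMetastore.AsyncClient.drop_partition_with_environment_context_call response) {
            try {
              // getResult() rethrows NoSuchObjectException/MetaException if the server set them.
              System.out.println("dropped = " + response.getResult());
            } catch (Exception e) {
              e.printStackTrace();
            }
            done.countDown();
          }
          public void onError(Exception exception) {
            exception.printStackTrace();
            done.countDown();
          }
        });
    done.await(); // wait for the selector thread to deliver the callback
  }
}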
@@ -806,6 +826,35 @@ public class ThriftHiveMetastore {
        return;
      }

+ public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_drop_table_with_environment_context(dbname, name, deleteData, environment_context);
+ recv_drop_table_with_environment_context();
+ }
+
+ public void send_drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
+ {
+ drop_table_with_environment_context_args args = new drop_table_with_environment_context_args();
+ args.setDbname(dbname);
+ args.setName(name);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ sendBase("drop_table_with_environment_context", args);
+ }
+
+ public void recv_drop_table_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ drop_table_with_environment_context_result result = new drop_table_with_environment_context_result();
+ receiveBase(result, "drop_table_with_environment_context");
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ return;
+ }
+
      public List<String> get_tables(String db_name, String pattern) throws MetaException, org.apache.thrift.TException
      {
        send_get_tables(db_name, pattern);
@@ -1144,6 +1193,41 @@ public class ThriftHiveMetastore {
        throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition failed: unknown result");
      }

+ public Partition append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context);
+ return recv_append_partition_with_environment_context();
+ }
+
+ public void send_append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context) throws org.apache.thrift.TException
+ {
+ append_partition_with_environment_context_args args = new append_partition_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_vals(part_vals);
+ args.setEnvironment_context(environment_context);
+ sendBase("append_partition_with_environment_context", args);
+ }
+
+ public Partition recv_append_partition_with_environment_context() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ append_partition_with_environment_context_result result = new append_partition_with_environment_context_result();
+ receiveBase(result, "append_partition_with_environment_context");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_with_environment_context failed: unknown result");
+ }
+
      public Partition append_partition_by_name(String db_name, String tbl_name, String part_name) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
      {
        send_append_partition_by_name(db_name, tbl_name, part_name);
@@ -1178,6 +1262,41 @@ public class ThriftHiveMetastore {
        throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_by_name failed: unknown result");
      }

+ public Partition append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context);
+ return recv_append_partition_by_name_with_environment_context();
+ }
+
+ public void send_append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context) throws org.apache.thrift.TException
+ {
+ append_partition_by_name_with_environment_context_args args = new append_partition_by_name_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_name(part_name);
+ args.setEnvironment_context(environment_context);
+ sendBase("append_partition_by_name_with_environment_context", args);
+ }
+
+ public Partition recv_append_partition_by_name_with_environment_context() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException
+ {
+ append_partition_by_name_with_environment_context_result result = new append_partition_by_name_with_environment_context_result();
+ receiveBase(result, "append_partition_by_name_with_environment_context");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ if (result.o3 != null) {
+ throw result.o3;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "append_partition_by_name_with_environment_context failed: unknown result");
+ }
+
      public boolean drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
      {
        send_drop_partition(db_name, tbl_name, part_vals, deleteData);
@@ -1210,6 +1329,39 @@ public class ThriftHiveMetastore {
        throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition failed: unknown result");
      }

+ public boolean drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context);
+ return recv_drop_partition_with_environment_context();
+ }
+
+ public void send_drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
+ {
+ drop_partition_with_environment_context_args args = new drop_partition_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_vals(part_vals);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ sendBase("drop_partition_with_environment_context", args);
+ }
+
+ public boolean recv_drop_partition_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ drop_partition_with_environment_context_result result = new drop_partition_with_environment_context_result();
+ receiveBase(result, "drop_partition_with_environment_context");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_with_environment_context failed: unknown result");
+ }
+
      public boolean drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
      {
        send_drop_partition_by_name(db_name, tbl_name, part_name, deleteData);
@@ -1242,6 +1394,39 @@ public class ThriftHiveMetastore {
        throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_by_name failed: unknown result");
      }

+ public boolean drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context);
+ return recv_drop_partition_by_name_with_environment_context();
+ }
+
+ public void send_drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context) throws org.apache.thrift.TException
+ {
+ drop_partition_by_name_with_environment_context_args args = new drop_partition_by_name_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_name(part_name);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ sendBase("drop_partition_by_name_with_environment_context", args);
+ }
+
+ public boolean recv_drop_partition_by_name_with_environment_context() throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+ {
+ drop_partition_by_name_with_environment_context_result result = new drop_partition_by_name_with_environment_context_result();
+ receiveBase(result, "drop_partition_by_name_with_environment_context");
+ if (result.isSetSuccess()) {
+ return result.success;
+ }
+ if (result.o1 != null) {
+ throw result.o1;
+ }
+ if (result.o2 != null) {
+ throw result.o2;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "drop_partition_by_name_with_environment_context failed: unknown result");
+ }
+
      public Partition get_partition(String db_name, String tbl_name, List<String> part_vals) throws MetaException, NoSuchObjectException, org.apache.thrift.TException
      {
        send_get_partition(db_name, tbl_name, part_vals);
@@ -3165,6 +3350,47 @@ public class ThriftHiveMetastore {
        }
      }

+ public void drop_table_with_environment_context(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_table_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ drop_table_with_environment_context_call method_call = new drop_table_with_environment_context_call(dbname, name, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class drop_table_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String dbname;
+ private String name;
+ private boolean deleteData;
+ private EnvironmentContext environment_context;
+ public drop_table_with_environment_context_call(String dbname, String name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_table_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.dbname = dbname;
+ this.name = name;
+ this.deleteData = deleteData;
+ this.environment_context = environment_context;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_table_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ drop_table_with_environment_context_args args = new drop_table_with_environment_context_args();
+ args.setDbname(dbname);
+ args.setName(name);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_drop_table_with_environment_context();
+ }
+ }
+
      public void get_tables(String db_name, String pattern, org.apache.thrift.async.AsyncMethodCallback<get_tables_call> resultHandler) throws org.apache.thrift.TException {
        checkReady();
        get_tables_call method_call = new get_tables_call(db_name, pattern, resultHandler, this, ___protocolFactory, ___transport);
@@ -3556,6 +3782,47 @@ public class ThriftHiveMetastore {
        }
      }

+ public void append_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ append_partition_with_environment_context_call method_call = new append_partition_with_environment_context_call(db_name, tbl_name, part_vals, environment_context, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class append_partition_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String db_name;
+ private String tbl_name;
+ private List<String> part_vals;
+ private EnvironmentContext environment_context;
+ public append_partition_with_environment_context_call(String db_name, String tbl_name, List<String> part_vals, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.db_name = db_name;
+ this.tbl_name = tbl_name;
+ this.part_vals = part_vals;
+ this.environment_context = environment_context;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("append_partition_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ append_partition_with_environment_context_args args = new append_partition_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_vals(part_vals);
+ args.setEnvironment_context(environment_context);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public Partition getResult() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_append_partition_with_environment_context();
+ }
+ }
+
      public void append_partition_by_name(String db_name, String tbl_name, String part_name, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_call> resultHandler) throws org.apache.thrift.TException {
        checkReady();
        append_partition_by_name_call method_call = new append_partition_by_name_call(db_name, tbl_name, part_name, resultHandler, this, ___protocolFactory, ___transport);
@@ -3594,6 +3861,47 @@ public class ThriftHiveMetastore {
        }
      }

+ public void append_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ append_partition_by_name_with_environment_context_call method_call = new append_partition_by_name_with_environment_context_call(db_name, tbl_name, part_name, environment_context, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class append_partition_by_name_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String db_name;
+ private String tbl_name;
+ private String part_name;
+ private EnvironmentContext environment_context;
+ public append_partition_by_name_with_environment_context_call(String db_name, String tbl_name, String part_name, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<append_partition_by_name_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.db_name = db_name;
+ this.tbl_name = tbl_name;
+ this.part_name = part_name;
+ this.environment_context = environment_context;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("append_partition_by_name_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ append_partition_by_name_with_environment_context_args args = new append_partition_by_name_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_name(part_name);
+ args.setEnvironment_context(environment_context);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public Partition getResult() throws InvalidObjectException, AlreadyExistsException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_append_partition_by_name_with_environment_context();
+ }
+ }
+
      public void drop_partition(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<drop_partition_call> resultHandler) throws org.apache.thrift.TException {
        checkReady();
        drop_partition_call method_call = new drop_partition_call(db_name, tbl_name, part_vals, deleteData, resultHandler, this, ___protocolFactory, ___transport);
@@ -3635,6 +3943,50 @@ public class ThriftHiveMetastore {
        }
      }

+ public void drop_partition_with_environment_context(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ drop_partition_with_environment_context_call method_call = new drop_partition_with_environment_context_call(db_name, tbl_name, part_vals, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class drop_partition_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String db_name;
+ private String tbl_name;
+ private List<String> part_vals;
+ private boolean deleteData;
+ private EnvironmentContext environment_context;
+ public drop_partition_with_environment_context_call(String db_name, String tbl_name, List<String> part_vals, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.db_name = db_name;
+ this.tbl_name = tbl_name;
+ this.part_vals = part_vals;
+ this.deleteData = deleteData;
+ this.environment_context = environment_context;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_partition_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ drop_partition_with_environment_context_args args = new drop_partition_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_vals(part_vals);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public boolean getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_drop_partition_with_environment_context();
+ }
+ }
+
      public void drop_partition_by_name(String db_name, String tbl_name, String part_name, boolean deleteData, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_call> resultHandler) throws org.apache.thrift.TException {
        checkReady();
        drop_partition_by_name_call method_call = new drop_partition_by_name_call(db_name, tbl_name, part_name, deleteData, resultHandler, this, ___protocolFactory, ___transport);
@@ -3676,6 +4028,50 @@ public class ThriftHiveMetastore {
        }
      }

+ public void drop_partition_by_name_with_environment_context(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_with_environment_context_call> resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ drop_partition_by_name_with_environment_context_call method_call = new drop_partition_by_name_with_environment_context_call(db_name, tbl_name, part_name, deleteData, environment_context, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class drop_partition_by_name_with_environment_context_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String db_name;
+ private String tbl_name;
+ private String part_name;
+ private boolean deleteData;
+ private EnvironmentContext environment_context;
+ public drop_partition_by_name_with_environment_context_call(String db_name, String tbl_name, String part_name, boolean deleteData, EnvironmentContext environment_context, org.apache.thrift.async.AsyncMethodCallback<drop_partition_by_name_with_environment_context_call> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.db_name = db_name;
+ this.tbl_name = tbl_name;
+ this.part_name = part_name;
+ this.deleteData = deleteData;
+ this.environment_context = environment_context;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_partition_by_name_with_environment_context", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ drop_partition_by_name_with_environment_context_args args = new drop_partition_by_name_with_environment_context_args();
+ args.setDb_name(db_name);
+ args.setTbl_name(tbl_name);
+ args.setPart_name(part_name);
+ args.setDeleteData(deleteData);
+ args.setEnvironment_context(environment_context);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public boolean getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_drop_partition_by_name_with_environment_context();
+ }
+ }
+
      public void get_partition(String db_name, String tbl_name, List<String> part_vals, org.apache.thrift.async.AsyncMethodCallback<get_partition_call> resultHandler) throws org.apache.thrift.TException {
        checkReady();
        get_partition_call method_call = new get_partition_call(db_name, tbl_name, part_vals, resultHandler, this, ___protocolFactory, ___transport);
@@ -5431,6 +5827,7 @@ public class ThriftHiveMetastore {
        processMap.put("create_table", new create_table());
        processMap.put("create_table_with_environment_context", new create_table_with_environment_context());
        processMap.put("drop_table", new drop_table());
+ processMap.put("drop_table_with_environment_context", new drop_table_with_environment_context());
        processMap.put("get_tables", new get_tables());
        processMap.put("get_all_tables", new get_all_tables());
        processMap.put("get_table", new get_table());
@@ -5442,9 +5839,13 @@ public class ThriftHiveMetastore {
        processMap.put("add_partition_with_environment_context", new add_partition_with_environment_context());
        processMap.put("add_partitions", new add_partitions());
        processMap.put("append_partition", new append_partition());
+ processMap.put("append_partition_with_environment_context", new append_partition_with_environment_context());
        processMap.put("append_partition_by_name", new append_partition_by_name());
+ processMap.put("append_partition_by_name_with_environment_context", new append_partition_by_name_with_environment_context());
        processMap.put("drop_partition", new drop_partition());
+ processMap.put("drop_partition_with_environment_context", new drop_partition_with_environment_context());
        processMap.put("drop_partition_by_name", new drop_partition_by_name());
+ processMap.put("drop_partition_by_name_with_environment_context", new drop_partition_by_name_with_environment_context());
        processMap.put("get_partition", new get_partition());
        processMap.put("get_partition_with_auth", new get_partition_with_auth());
        processMap.put("get_partition_by_name", new get_partition_by_name());
@@ -5898,6 +6299,32 @@ public class ThriftHiveMetastore {
        }
      }

+ public static class drop_table_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_table_with_environment_context_args> {
+ public drop_table_with_environment_context() {
+ super("drop_table_with_environment_context");
+ }
+
+ public drop_table_with_environment_context_args getEmptyArgsInstance() {
+ return new drop_table_with_environment_context_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public drop_table_with_environment_context_result getResult(I iface, drop_table_with_environment_context_args args) throws org.apache.thrift.TException {
+ drop_table_with_environment_context_result result = new drop_table_with_environment_context_result();
+ try {
+ iface.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
      public static class get_tables<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_tables_args> {
        public get_tables() {
          super("get_tables");
@@ -6193,6 +6620,34 @@ public class ThriftHiveMetastore {
        }
      }

+ public static class append_partition_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_with_environment_context_args> {
+ public append_partition_with_environment_context() {
+ super("append_partition_with_environment_context");
+ }
+
+ public append_partition_with_environment_context_args getEmptyArgsInstance() {
+ return new append_partition_with_environment_context_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public append_partition_with_environment_context_result getResult(I iface, append_partition_with_environment_context_args args) throws org.apache.thrift.TException {
+ append_partition_with_environment_context_result result = new append_partition_with_environment_context_result();
+ try {
+ result.success = iface.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context);
+ } catch (InvalidObjectException o1) {
+ result.o1 = o1;
+ } catch (AlreadyExistsException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
      public static class append_partition_by_name<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_by_name_args> {
        public append_partition_by_name() {
          super("append_partition_by_name");
@@ -6221,6 +6676,34 @@ public class ThriftHiveMetastore {
        }
      }

+ public static class append_partition_by_name_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, append_partition_by_name_with_environment_context_args> {
+ public append_partition_by_name_with_environment_context() {
+ super("append_partition_by_name_with_environment_context");
+ }
+
+ public append_partition_by_name_with_environment_context_args getEmptyArgsInstance() {
+ return new append_partition_by_name_with_environment_context_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public append_partition_by_name_with_environment_context_result getResult(I iface, append_partition_by_name_with_environment_context_args args) throws org.apache.thrift.TException {
+ append_partition_by_name_with_environment_context_result result = new append_partition_by_name_with_environment_context_result();
+ try {
+ result.success = iface.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context);
+ } catch (InvalidObjectException o1) {
+ result.o1 = o1;
+ } catch (AlreadyExistsException o2) {
+ result.o2 = o2;
+ } catch (MetaException o3) {
+ result.o3 = o3;
+ }
+ return result;
+ }
+ }
+
      public static class drop_partition<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_args> {
        public drop_partition() {
          super("drop_partition");
@@ -6248,6 +6731,33 @@ public class ThriftHiveMetastore {
        }
      }

+ public static class drop_partition_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_with_environment_context_args> {
+ public drop_partition_with_environment_context() {
+ super("drop_partition_with_environment_context");
+ }
+
+ public drop_partition_with_environment_context_args getEmptyArgsInstance() {
+ return new drop_partition_with_environment_context_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public drop_partition_with_environment_context_result getResult(I iface, drop_partition_with_environment_context_args args) throws org.apache.thrift.TException {
+ drop_partition_with_environment_context_result result = new drop_partition_with_environment_context_result();
+ try {
+ result.success = iface.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context);
+ result.setSuccessIsSet(true);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
      public static class drop_partition_by_name<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_by_name_args> {
        public drop_partition_by_name() {
          super("drop_partition_by_name");
@@ -6275,6 +6785,33 @@ public class ThriftHiveMetastore {
        }
      }

+ public static class drop_partition_by_name_with_environment_context<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_partition_by_name_with_environment_context_args> {
+ public drop_partition_by_name_with_environment_context() {
+ super("drop_partition_by_name_with_environment_context");
+ }
+
+ public drop_partition_by_name_with_environment_context_args getEmptyArgsInstance() {
+ return new drop_partition_by_name_with_environment_context_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public drop_partition_by_name_with_environment_context_result getResult(I iface, drop_partition_by_name_with_environment_context_args args) throws org.apache.thrift.TException {
+ drop_partition_by_name_with_environment_context_result result = new drop_partition_by_name_with_environment_context_result();
+ try {
+ result.success = iface.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context);
+ result.setSuccessIsSet(true);
+ } catch (NoSuchObjectException o1) {
+ result.o1 = o1;
+ } catch (MetaException o2) {
+ result.o2 = o2;
+ }
+ return result;
+ }
+ }
+
      public static class get_partition<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_partition_args> {
        public get_partition() {
          super("get_partition");
@@ -16721,7 +17258,7 @@ public class ThriftHiveMetastore {
                    for (int _i273 = 0; _i273 < _map272.size; ++_i273)
                    {
                      String _key274; // required
- Type _val275; // optional
+ Type _val275; // required
                      _key274 = iprot.readString();
                      _val275 = new Type();
                      _val275.read(iprot);
@@ -16825,7 +17362,7 @@ public class ThriftHiveMetastore {
              for (int _i279 = 0; _i279 < _map278.size; ++_i279)
              {
                String _key280; // required
- Type _val281; // optional
+ Type _val281; // required
                _key280 = iprot.readString();
                _val281 = new Type();
                _val281.read(iprot);
@@ -22449,25 +22986,31 @@ public class ThriftHiveMetastore {

    }

- public static class get_tables_args implements org.apache.thrift.TBase<get_tables_args, get_tables_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_tables_args");
+ public static class drop_table_with_environment_context_args implements org.apache.thrift.TBase<drop_table_with_environment_context_args, drop_table_with_environment_context_args._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("drop_table_with_environment_context_args");

- private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("db_name", org.apache.thrift.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift.protocol.TField PATTERN_FIELD_DESC = new org.apache.thrift.protocol.TField("pattern", org.apache.thrift.protocol.TType.STRING, (short)2);
+ private static final org.apache.thrift.protocol.TField DBNAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbname", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)2);
+ private static final org.apache.thrift.protocol.TField DELETE_DATA_FIELD_DESC = new org.apache.thrift.protocol.TField("deleteData", org.apache.thrift.protocol.TType.BOOL, (short)3);
+ private static final org.apache.thrift.protocol.TField ENVIRONMENT_CONTEXT_FIELD_DESC = new org.apache.thrift.protocol.TField("environment_context", org.apache.thrift.protocol.TType.STRUCT, (short)4);

      private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
      static {
- schemes.put(StandardScheme.class, new get_tables_argsStandardSchemeFactory());
- schemes.put(TupleScheme.class, new get_tables_argsTupleSchemeFactory());
+ schemes.put(StandardScheme.class, new drop_table_with_environment_context_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new drop_table_with_environment_context_argsTupleSchemeFactory());
      }

- private String db_name; // required
- private String pattern; // required
+ private String dbname; // required
+ private String name; // required
+ private boolean deleteData; // required
+ private EnvironmentContext environment_context; // required

      /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
      public enum _Fields implements org.apache.thrift.TFieldIdEnum {
- DB_NAME((short)1, "db_name"),
- PATTERN((short)2, "pattern");
+ DBNAME((short)1, "dbname"),
+ NAME((short)2, "name"),
+ DELETE_DATA((short)3, "deleteData"),
+ ENVIRONMENT_CONTEXT((short)4, "environment_context");

        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

@@ -22482,10 +23025,14 @@ public class ThriftHiveMetastore {
         */
        public static _Fields findByThriftId(int fieldId) {
          switch(fieldId) {
- case 1: // DB_NAME
- return DB_NAME;
- case 2: // PATTERN
- return PATTERN;
+ case 1: // DBNAME
+ return DBNAME;
+ case 2: // NAME
+ return NAME;
+ case 3: // DELETE_DATA
+ return DELETE_DATA;
+ case 4: // ENVIRONMENT_CONTEXT
+ return ENVIRONMENT_CONTEXT;
            default:
              return null;
          }
@@ -22526,112 +23073,192 @@ public class ThriftHiveMetastore {
      }

      // isset id assignments
+ private static final int __DELETEDATA_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
      public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
      static {
        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("db_name", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ tmpMap.put(_Fields.DBNAME, new org.apache.thrift.meta_data.FieldMetaData("dbname", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
- tmpMap.put(_Fields.PATTERN, new org.apache.thrift.meta_data.FieldMetaData("pattern", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.DELETE_DATA, new org.apache.thrift.meta_data.FieldMetaData("deleteData", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+ tmpMap.put(_Fields.ENVIRONMENT_CONTEXT, new org.apache.thrift.meta_data.FieldMetaData("environment_context", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, EnvironmentContext.class)));
        metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_tables_args.class, metaDataMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(drop_table_with_environment_context_args.class, metaDataMap);
      }

- public get_tables_args() {
+ public drop_table_with_environment_context_args() {
      }

- public get_tables_args(
- String db_name,
- String pattern)
+ public drop_table_with_environment_context_args(
+ String dbname,
+ String name,
+ boolean deleteData,
+ EnvironmentContext environment_context)
      {
        this();
- this.db_name = db_name;
- this.pattern = pattern;
+ this.dbname = dbname;
+ this.name = name;
+ this.deleteData = deleteData;
+ setDeleteDataIsSet(true);
+ this.environment_context = environment_context;
      }

      /**
       * Performs a deep copy on <i>other</i>.
       */
- public get_tables_args(get_tables_args other) {
- if (other.isSetDb_name()) {
- this.db_name = other.db_name;
+ public drop_table_with_environment_context_args(drop_table_with_environment_context_args other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.isSetDbname()) {
+ this.dbname = other.dbname;
        }
- if (other.isSetPattern()) {
- this.pattern = other.pattern;
+ if (other.isSetName()) {
+ this.name = other.name;
+ }
+ this.deleteData = other.deleteData;
+ if (other.isSetEnvironment_context()) {
+ this.environment_context = new EnvironmentContext(other.environment_context);
        }
      }

- public get_tables_args deepCopy() {
- return new get_tables_args(this);
+ public drop_table_with_environment_context_args deepCopy() {
+ return new drop_table_with_environment_context_args(this);
      }

      @Override
      public void clear() {
- this.db_name = null;
- this.pattern = null;
+ this.dbname = null;
+ this.name = null;
+ setDeleteDataIsSet(false);
+ this.deleteData = false;
+ this.environment_context = null;
      }

- public String getDb_name() {
- return this.db_name;
+ public String getDbname() {
+ return this.dbname;
      }

- public void setDb_name(String db_name) {
- this.db_name = db_name;
+ public void setDbname(String dbname) {
+ this.dbname = dbname;
      }

- public void unsetDb_name() {
- this.db_name = null;
+ public void unsetDbname() {
+ this.dbname = null;
      }

- /** Returns true if field db_name is set (has been assigned a value) and false otherwise */
- public boolean isSetDb_name() {
- return this.db_name != null;
+ /** Returns true if field dbname is set (has been assigned a value) and false otherwise */
+ public boolean isSetDbname() {
+ return this.dbname != null;
      }

- public void setDb_nameIsSet(boolean value) {
+ public void setDbnameIsSet(boolean value) {
        if (!value) {
- this.db_name = null;
+ this.dbname = null;
        }
      }

- public String getPattern() {
- return this.pattern;
+ public String getName() {
+ return this.name;
      }

- public void setPattern(String pattern) {
- this.pattern = pattern;
+ public void setName(String name) {
+ this.name = name;
      }

- public void unsetPattern() {
- this.pattern = null;
+ public void unsetName() {
+ this.name = null;
      }

- /** Returns true if field pattern is set (has been assigned a value) and false otherwise */
- public boolean isSetPattern() {
- return this.pattern != null;
+ /** Returns true if field name is set (has been assigned a value) and false otherwise */
+ public boolean isSetName() {
+ return this.name != null;
      }

- public void setPatternIsSet(boolean value) {
+ public void setNameIsSet(boolean value) {
        if (!value) {
- this.pattern = null;
+ this.name = null;
+ }
+ }
+
+ public boolean isDeleteData() {
+ return this.deleteData;
+ }
+
+ public void setDeleteData(boolean deleteData) {
+ this.deleteData = deleteData;
+ setDeleteDataIsSet(true);
+ }
+
+ public void unsetDeleteData() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DELETEDATA_ISSET_ID);
+ }
+
+ /** Returns true if field deleteData is set (has been assigned a value) and false otherwise */
+ public boolean isSetDeleteData() {
+ return EncodingUtils.testBit(__isset_bitfield, __DELETEDATA_ISSET_ID);
+ }
+
+ public void setDeleteDataIsSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DELETEDATA_ISSET_ID, value);
+ }
+
+ public EnvironmentContext getEnvironment_context() {
+ return this.environment_context;
+ }
+
+ public void setEnvironment_context(EnvironmentContext environment_context) {
+ this.environment_context = environment_context;
+ }
+
+ public void unsetEnvironment_context() {
+ this.environment_context = null;
+ }
+
+ /** Returns true if field environment_context is set (has been assigned a value) and false otherwise */
+ public boolean isSetEnvironment_context() {
+ return this.environment_context != null;
+ }
+
+ public void setEnvironment_contextIsSet(boolean value) {
+ if (!value) {
+ this.environment_context = null;
        }
      }

      public void setFieldValue(_Fields field, Object value) {
        switch (field) {
- case DB_NAME:
+ case DBNAME:
          if (value == null) {
- unsetDb_name();
+ unsetDbname();
          } else {
- setDb_name((String)value);
+ setDbname((String)value);
          }
          break;

- case PATTERN:
+ case NAME:
          if (value == null) {
- unsetPattern();
+ unsetName();
          } else {
- setPattern((String)value);
+ setName((String)value);
+ }
+ break;
+
+ case DELETE_DATA:
+ if (value == null) {
+ unsetDeleteData();
+ } else {
+ setDeleteData((Boolean)value);
+ }
+ break;
+
+ case ENVIRONMENT_CONTEXT:
+ if (value == null) {
+ unsetEnvironment_context();
+ } else {
+ setEnvironment_context((EnvironmentContext)value);
          }
          break;

@@ -22640,11 +23267,17 @@ public class ThriftHiveMetastore {

      public Object getFieldValue(_Fields field) {
        switch (field) {
- case DB_NAME:
- return getDb_name();
+ case DBNAME:
+ return getDbname();

- case PATTERN:
- return getPattern();
+ case NAME:
+ return getName();
+
+ case DELETE_DATA:
+ return Boolean.valueOf(isDeleteData());
+
+ case ENVIRONMENT_CONTEXT:
+ return getEnvironment_context();

        }
        throw new IllegalStateException();
@@ -22657,10 +23290,14 @@ public class ThriftHiveMetastore {
        }

        switch (field) {
- case DB_NAME:
- return isSetDb_name();
- case PATTERN:
- return isSetPattern();
+ case DBNAME:
+ return isSetDbname();
+ case NAME:
+ return isSetName();
+ case DELETE_DATA:
+ return isSetDeleteData();
+ case ENVIRONMENT_CONTEXT:
+ return isSetEnvironment_context();
        }
        throw new IllegalStateException();
      }
@@ -22669,30 +23306,48 @@ public class ThriftHiveMetastore {
      public boolean equals(Object that) {
        if (that == null)
          return false;
- if (that instanceof get_tables_args)
- return this.equals((get_tables_args)that);
+ if (that instanceof drop_table_with_environment_context_args)
+ return this.equals((drop_table_with_environment_context_args)that);
        return false;
      }

- public boolean equals(get_tables_args that) {
+ public boolean equals(drop_table_with_environment_context_args that) {
        if (that == null)
          return false;

- boolean this_present_db_name = true && this.isSetDb_name();
- boolean that_present_db_name = true && that.isSetDb_name();
- if (this_present_db_name || that_present_db_name) {
- if (!(this_present_db_name && that_present_db_name))
+ boolean this_present_dbname = true && this.isSetDbname();
+ boolean that_present_dbname = true && that.isSetDbname();
+ if (this_present_dbname || that_present_dbname) {
+ if (!(this_present_dbname && that_present_dbname))
            return false;
- if (!this.db_name.equals(that.db_name))
+ if (!this.dbname.equals(that.dbname))
            return false;
        }

- boolean this_present_pattern = true && this.isSetPattern();
- boolean that_present_pattern = true && that.isSetPattern();
- if (this_present_pattern || that_present_pattern) {
- if (!(this_present_pattern && that_present_pattern))
+ boolean this_present_name = true && this.isSetName();
+ boolean that_present_name = true && that.isSetName();
+ if (this_present_name || that_present_name) {
+ if (!(this_present_name && that_present_name))
            return false;
- if (!this.pattern.equals(that.pattern))
+ if (!this.name.equals(that.name))
+ return false;
+ }
+
+ boolean this_present_deleteData = true;
+ boolean that_present_deleteData = true;
+ if (this_present_deleteData || that_present_deleteData) {
+ if (!(this_present_deleteData && that_present_deleteData))
+ return false;
+ if (this.deleteData != that.deleteData)
+ return false;
+ }
+
+ boolean this_present_environment_context = true && this.isSetEnvironment_context();
+ boolean that_present_environment_context = true && that.isSetEnvironment_context();
+ if (this_present_environment_context || that_present_environment_context) {
+ if (!(this_present_environment_context && that_present_environment_context))
+ return false;
+ if (!this.environment_context.equals(that.environment_context))
            return false;
        }

@@ -22703,43 +23358,73 @@ public class ThriftHiveMetastore {
      public int hashCode() {
        HashCodeBuilder builder = new HashCodeBuilder();

- boolean present_db_name = true && (isSetDb_name());
- builder.append(present_db_name);
- if (present_db_name)
- builder.append(db_name);
+ boolean present_dbname = true && (isSetDbname());
+ builder.append(present_dbname);
+ if (present_dbname)
+ builder.append(dbname);

- boolean present_pattern = true && (isSetPattern());
- builder.append(present_pattern);
- if (present_pattern)
- builder.append(pattern);
+ boolean present_name = true && (isSetName());
+ builder.append(present_name);
+ if (present_name)
+ builder.append(name);
+
+ boolean present_deleteData = true;
+ builder.append(present_deleteData);
+ if (present_deleteData)
+ builder.append(deleteData);
+
+ boolean present_environment_context = true && (isSetEnvironment_context());
+ builder.append(present_environment_context);
+ if (present_environment_context)
+ builder.append(environment_context);

        return builder.toHashCode();
      }

- public int compareTo(get_tables_args other) {
+ public int compareTo(drop_table_with_environment_context_args other) {
        if (!getClass().equals(other.getClass())) {
          return getClass().getName().compareTo(other.getClass().getName());
        }

        int lastComparison = 0;
- get_tables_args typedOther = (get_tables_args)other;
+ drop_table_with_environment_context_args typedOther = (drop_table_with_environment_context_args)other;

- lastComparison = Boolean.valueOf(isSetDb_name()).compareTo(typedOther.isSetDb_name());
+ lastComparison = Boolean.valueOf(isSetDbname()).compareTo(typedOther.isSetDbname());
        if (lastComparison != 0) {
          return lastComparison;
        }
- if (isSetDb_name()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.db_name, typedOther.db_name);
+ if (isSetDbname()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbname, typedOther.dbname);
          if (lastComparison != 0) {
            return lastComparison;
          }
        }
- lastComparison = Boolean.valueOf(isSetPattern()).compareTo(typedOther.isSetPattern());
+ lastComparison = Boolean.valueOf(isSetName()).compareTo(typedOther.isSetName());
        if (lastComparison != 0) {
          return lastComparison;
        }
- if (isSetPattern()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.pattern, typedOther.pattern);
+ if (isSetName()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, typedOther.name);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetDeleteData()).compareTo(typedOther.isSetDeleteData());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetDeleteData()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.deleteData, typedOther.deleteData);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(isSetEnvironment_context()).compareTo(typedOther.isSetEnvironment_context());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (isSetEnvironment_context()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.environment_context, typedOther.environment_context);
          if (lastComparison != 0) {
            return lastComparison;
          }
@@ -22761,22 +23446,34 @@ public class ThriftHiveMetastore {

      @Override
      public String toString() {
- StringBuilder sb = new StringBuilder("get_tables_args(");
+ StringBuilder sb = new StringBuilder("drop_table_with_environment_context_args(");
        boolean first = true;

- sb.append("db_name:");
- if (this.db_name == null) {
+ sb.append("dbname:");
+ if (this.dbname == null) {
          sb.append("null");
        } else {
- sb.append(this.db_name);
+ sb.append(this.dbname);
        }
        first = false;
        if (!first) sb.append(", ");
- sb.append("pattern:");
- if (this.pattern == null) {
+ sb.append("name:");
+ if (this.name == null) {
          sb.append("null");
        } else {
- sb.append(this.pattern);
+ sb.append(this.name);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("deleteData:");
+ sb.append(this.deleteData);
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("environment_context:");
+ if (this.environment_context == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.environment_context);
        }
        first = false;
        sb.append(")");
@@ -22786,6 +23483,9 @@ public class ThriftHiveMetastore {
      public void validate() throws org.apache.thrift.TException {
        // check for required fields
        // check for sub-struct validity
+ if (environment_context != null) {
+ environment_context.validate();
+ }
      }

      private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
@@ -22798,21 +23498,23 @@ public class ThriftHiveMetastore {

      private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
        try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
          read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
        } catch (org.apache.thrift.TException te) {
          throw new java.io.IOException(te);
        }
      }

- private static class get_tables_argsStandardSchemeFactory implements SchemeFactory {
- public get_tables_argsStandardScheme getScheme() {
- return new get_tables_argsStandardScheme();
+ private static class drop_table_with_environment_context_argsStandardSchemeFactory implements SchemeFactory {
+ public drop_table_with_environment_context_argsStandardScheme getScheme() {
+ return new drop_table_with_environment_context_argsStandardScheme();
        }
      }

- private static class get_tables_argsStandardScheme extends StandardScheme<get_tables_args> {
+ private static class drop_table_with_environment_context_argsStandardScheme extends StandardScheme<drop_table_with_environment_context_args> {

- public void read(org.apache.thrift.protocol.TProtocol iprot, get_tables_args struct) throws org.apache.thrift.TException {
+ public void read(org.apache.thrift.protocol.TProtocol iprot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
          org.apache.thrift.protocol.TField schemeField;
          iprot.readStructBegin();
          while (true)
@@ -22822,18 +23524,35 @@ public class ThriftHiveMetastore {
              break;
            }
            switch (schemeField.id) {
- case 1: // DB_NAME
+ case 1: // DBNAME
                if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
- struct.db_name = iprot.readString();
- struct.setDb_nameIsSet(true);
+ struct.dbname = iprot.readString();
+ struct.setDbnameIsSet(true);
                } else {
                  org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                }
                break;
- case 2: // PATTERN
+ case 2: // NAME
                if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
- struct.pattern = iprot.readString();
- struct.setPatternIsSet(true);
+ struct.name = iprot.readString();
+ struct.setNameIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // DELETE_DATA
+ if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
+ struct.deleteData = iprot.readBool();
+ struct.setDeleteDataIsSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 4: // ENVIRONMENT_CONTEXT
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.environment_context = new EnvironmentContext();
+ struct.environment_context.read(iprot);
+ struct.setEnvironment_contextIsSet(true);
                } else {
                  org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                }
@@ -22847,18 +23566,26 @@ public class ThriftHiveMetastore {
          struct.validate();
        }

- public void write(org.apache.thrift.protocol.TProtocol oprot, get_tables_args struct) throws org.apache.thrift.TException {
+ public void write(org.apache.thrift.protocol.TProtocol oprot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
          struct.validate();

          oprot.writeStructBegin(STRUCT_DESC);
- if (struct.db_name != null) {
- oprot.writeFieldBegin(DB_NAME_FIELD_DESC);
- oprot.writeString(struct.db_name);
+ if (struct.dbname != null) {
+ oprot.writeFieldBegin(DBNAME_FIELD_DESC);
+ oprot.writeString(struct.dbname);
            oprot.writeFieldEnd();
          }
- if (struct.pattern != null) {
- oprot.writeFieldBegin(PATTERN_FIELD_DESC);
- oprot.writeString(struct.pattern);
+ if (struct.name != null) {
+ oprot.writeFieldBegin(NAME_FIELD_DESC);
+ oprot.writeString(struct.name);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldBegin(DELETE_DATA_FIELD_DESC);
+ oprot.writeBool(struct.deleteData);
+ oprot.writeFieldEnd();
+ if (struct.environment_context != null) {
+ oprot.writeFieldBegin(ENVIRONMENT_CONTEXT_FIELD_DESC);
+ struct.environment_context.write(oprot);
            oprot.writeFieldEnd();
          }
          oprot.writeFieldStop();
@@ -22867,69 +23594,90 @@ public class ThriftHiveMetastore {

      }

- private static class get_tables_argsTupleSchemeFactory implements SchemeFactory {
- public get_tables_argsTupleScheme getScheme() {
- return new get_tables_argsTupleScheme();
+ private static class drop_table_with_environment_context_argsTupleSchemeFactory implements SchemeFactory {
+ public drop_table_with_environment_context_argsTupleScheme getScheme() {
+ return new drop_table_with_environment_context_argsTupleScheme();
        }
      }

- private static class get_tables_argsTupleScheme extends TupleScheme<get_tables_args> {
+ private static class drop_table_with_environment_context_argsTupleScheme extends TupleScheme<drop_table_with_environment_context_args> {

        @Override
- public void write(org.apache.thrift.protocol.TProtocol prot, get_tables_args struct) throws org.apache.thrift.TException {
+ public void write(org.apache.thrift.protocol.TProtocol prot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
          TTupleProtocol oprot = (TTupleProtocol) prot;
          BitSet optionals = new BitSet();
- if (struct.isSetDb_name()) {
+ if (struct.isSetDbname()) {
            optionals.set(0);
          }
- if (struct.isSetPattern()) {
+ if (struct.isSetName()) {
            optionals.set(1);
          }
- oprot.writeBitSet(optionals, 2);
- if (struct.isSetDb_name()) {
- oprot.writeString(struct.db_name);
+ if (struct.isSetDeleteData()) {
+ optionals.set(2);
          }
- if (struct.isSetPattern()) {
- oprot.writeString(struct.pattern);
+ if (struct.isSetEnvironment_context()) {
+ optionals.set(3);
+ }
+ oprot.writeBitSet(optionals, 4);
+ if (struct.isSetDbname()) {
+ oprot.writeString(struct.dbname);
+ }
+ if (struct.isSetName()) {
+ oprot.writeString(struct.name);
+ }
+ if (struct.isSetDeleteData()) {
+ oprot.writeBool(struct.deleteData);
+ }
+ if (struct.isSetEnvironment_context()) {
+ struct.environment_context.write(oprot);
          }
        }

        @Override
- public void read(org.apache.thrift.protocol.TProtocol prot, get_tables_args struct) throws org.apache.thrift.TException {
+ public void read(org.apache.thrift.protocol.TProtocol prot, drop_table_with_environment_context_args struct) throws org.apache.thrift.TException {
          TTupleProtocol iprot = (TTupleProtocol) prot;
- BitSet incoming = iprot.readBitSet(2);
+ BitSet incoming = iprot.readBitSet(4);
          if (incoming.get(0)) {
- struct.db_name = iprot.readString();
- struct.setDb_nameIsSet(true);
+ struct.dbname = iprot.readString();
+ struct.setDbnameIsSet(true);
          }
          if (incoming.get(1)) {
- struct.pattern = iprot.readString();
- struct.setPatternIsSet(true);
+ struct.name = iprot.readString();
+ struct.setNameIsSet(true);
+ }
+ if (incoming.get(2)) {
+ struct.deleteData = iprot.readBool();
+ struct.setDeleteDataIsSet(true);
+ }
+ if (incoming.get(3)) {
+ struct.environment_context = new EnvironmentContext();
+ struct.environment_context.read(iprot);
+ struct.setEnvironment_contextIsSet(true);
          }
        }
      }

    }

- public static class get_tables_result implements org.apache.thrift.TBase<get_tables_result, get_tables_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("get_tables_result");
+ public static class drop_table_with_environment_context_result implements org.apache.thrift.TBase<drop_table_with_environment_context_result, drop_table_with_environment_context_result._Fields>, java.io.Serializable, Cloneable {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("drop_table_with_environment_context_result");

- private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.LIST, (short)0);
      private static final org.apache.thrift.protocol.TField O1_FIELD_DESC = new org.apache.thrift.protocol.TField("o1", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+ private static final org.apache.thrift.protocol.TField O3_FIELD_DESC = new org.apache.thrift.protocol.TField("o3", org.apache.thrift.protocol.TType.STRUCT, (short)2);

      private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
      static {
- schemes.put(StandardScheme.class, new get_tables_resultStandardSchemeFactory());
- schemes.put(TupleScheme.class, new get_tables_resultTupleSchemeFactory());
+ schemes.put(StandardScheme.class, new drop_table_with_environment_context_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new drop_table_with_environment_context_resultTupleSchemeFactory());
      }

- private List<String> success; // required
- private MetaException o1; // required
+ private NoSuchObjectException o1; // required
+ private MetaException o3; // required

      /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
      public enum _Fields implements org.apache.thrift.TFieldIdEnum {
- SUCCESS((short)0, "success"),
- O1((short)1, "o1");
+ O1((short)1, "o1"),
+ O3((short)2, "o3");

        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

@@ -22944,10 +23692,10 @@ public class ThriftHiveMetastore {
         */
        public static _Fields findByThriftId(int fieldId) {
          switch(fieldId) {
- case 0: // SUCCESS
- return SUCCESS;
            case 1: // O1
              return O1;
+ case 2: // O3
+ return O3;
            default:
              return null;
          }
@@ -22991,129 +23739,109 @@ public class ThriftHiveMetastore {
      public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
      static {
        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
- new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
- new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
        tmpMap.put(_Fields.O1, new org.apache.thrift.meta_data.FieldMetaData("o1", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
+ tmpMap.put(_Fields.O3, new org.apache.thrift.meta_data.FieldMetaData("o3", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
        metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(get_tables_result.class, metaDataMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(drop_table_with_environment_context_result.class, metaDataMap);
      }

- public get_tables_result() {
+ public drop_table_with_environment_context_result() {
      }

- public get_tables_result(
- List<String> success,
- MetaException o1)
+ public drop_table_with_environment_context_result(
+ NoSuchObjectException o1,
+ MetaException o3)
      {
        this();
- this.success = success;
        this.o1 = o1;
+ this.o3 = o3;
      }

      /**
       * Performs a deep copy on <i>other</i>.
       */
- public get_tables_result(get_tables_result other) {
- if (other.isSetSuccess()) {
- List<String> __this__success = new ArrayList<String>();
- for (String other_element : other.success) {
- __this__success.add(other_element);
- }
- this.success = __this__success;
- }
+ public drop_table_with_environment_context_result(drop_table_with_environment_context_result other) {
        if (other.isSetO1()) {
- this.o1 = new MetaException(other.o1);
+ this.o1 = new NoSuchObjectException(other.o1);
+ }
+ if (other.isSetO3()) {
+ this.o3 = new MetaException(other.o3);
        }
      }

- public get_tables_result deepCopy() {
- return new get_tables_result(this);
+ public drop_table_with_environment_context_result deepCopy() {
+ return new drop_table_with_environment_context_result(this);
      }

      @Override
      public void clear() {
- this.success = null;
        this.o1 = null;
+ this.o3 = null;
      }

- public int getSuccessSize() {
- return (this.success == null) ? 0 : this.success.size();
- }
-
- public java.util.Iterator<String> getSuccessIterator() {
- return (this.success == null) ? null : this.success.iterator();
- }
-
- public void addToSuccess(String elem) {
- if (this.success == null) {
- this.success = new ArrayList<String>();
- }
- this.success.add(elem);
- }
-
- public List<String> getSuccess() {
- return this.success;
+ public NoSuchObjectException getO1() {
+ return this.o1;
      }

- public void setSuccess(List<String> success) {
- this.success = success;
+ public void setO1(NoSuchObjectException o1) {
+ this.o1 = o1;
      }

- public void unsetSuccess() {
- this.success = null;
+ public void unsetO1() {
+ this.o1 = null;
      }

- /** Returns true if field success is set (has been assigned a value) and false otherwise */
- public boolean isSetSuccess() {
- return this.success != null;
+ /** Returns true if field o1 is set (has been assigned a value) and false otherwise */
+ public boolean isSetO1() {
+ return this.o1 != null;
      }

- public void setSuccessIsSet(boolean value) {
+ public void setO1IsSet(boolean value) {
        if (!value) {
- this.success = null;
+ this.o1 = null;
        }
      }

- public MetaException getO1() {
- return this.o1;
+ public MetaException getO3() {
+ return this.o3;
      }

- public void setO1(MetaException o1) {
- this.o1 = o1;
+ public void setO3(MetaException o3) {
+ this.o3 = o3;
      }

- public void unsetO1() {
- this.o1 = null;
+ public void unsetO3() {
+ this.o3 = null;
      }

- /** Returns true if field o1 is set (has been assigned a value) and false otherwise */
- public boolean isSetO1() {
- return this.o1 != null;
+ /** Returns true if field o3 is set (has been assigned a value) and false otherwise */
+ public boolean isSetO3() {
+ return this.o3 != null;
      }

- public void setO1IsSet(boolean value) {
+ public void setO3IsSet(boolean value) {
        if (!value) {
- this.o1 = null;
+ this.o3 = null;
        }
      }

      public void setFieldValue(_Fields field, Object value) {
        switch (field) {
- case SUCCESS:
+ case O1:
          if (value == null) {
- unsetSuccess();
+ unsetO1();
          } else {
- setSuccess((List<String>)value);
+ setO1((NoSuchObjectException)value);
          }
          break;

- case O1:
+ case O3:
          if (value == null) {
- unsetO1();
+ unsetO3();
          } else {
- setO1((MetaException)value);
+ setO3((MetaException)value);
          }
          break;

@@ -23122,12 +23850,12 @@ public class ThriftHiveMetastore {

      public Object getFieldValue(_Fields field) {
        switch (field) {
- case SUCCESS:
- return getSuccess();
-
        case O1:
          return getO1();

+ case O3:
+ return getO3();
+
        }
        throw new IllegalStateException();
      }
@@ -23139,10 +23867,10 @@ public class ThriftHiveMetastore {
        }

        switch (field) {
- case SUCCESS:
- return isSetSuccess();
        case O1:
          return isSetO1();
+ case O3:
+ return isSetO3();
        }
        throw new IllegalStateException();
      }
@@ -23151,24 +23879,15 @@ public class ThriftHiveMetastore {
      public boolean equals(Object that) {
        if (that == null)
          return false;
- if (that instanceof get_tables_result)
- return this.equals((get_tables_result)that);
+ if (that instanceof drop_table_with_environment_context_result)
+ return this.equals((drop_table_with_environment_context_result)that);
        return false;
      }

- public boolean equals(get_tables_result that) {
+ public boolean equals(drop_table_with_environment_context_result that) {
        if (that == null)
          return false;

- boolean this_present_success = true && this.isSetSuccess();
- boolean that_present_success = true && that.isSetSuccess();
- if (this_present_success || that_present_success) {
- if (!(this_present_success && that_present_success))
- return false;
- if (!this.success.equals(that.success))
- return false;
- }
-
        boolean this_present_o1 = true && this.isSetO1();
        boolean that_present_o1 = true && that.isSetO1();
        if (this_present_o1 || that_present_o1) {
@@ -23178,6 +23897,15 @@ public class ThriftHiveMetastore {
            return false;
        }

+ boolean this_present_o3 = true && this.isSetO3();
+ boolean that_present_o3 = true && that.isSetO3();
+ if (this_present_o3 || that_present_o3) {
+ if (!(this_present_o3 && that_present_o3))
+ return false;
+ if (!this.o3.equals(that.o3))
+ return false;
+ }
+
        return true;
      }

@@ -23185,43 +23913,43 @@ public class ThriftHiveMetastore {
      public int hashCode() {
        HashCodeBuilder builder = new HashCodeBuilder();

- boolean present_success = true && (isSetSuccess());
- builder.append(present_success);
- if (present_success)
- builder.append(success);
-
        boolean present_o1 = true && (isSetO1());
        builder.append(present_o1);
        if (present_o1)
          builder.append(o1);

+ boolean present_o3 = true && (isSetO3());
+ builder.append(present_o3);
+ if (present_o3)
+ builder.append(o3);
+
        return builder.toHashCode();
      }

- public int compareTo(get_tables_result other) {
+ public int compareTo(drop_table_with_environment_context_result other) {
        if (!getClass().equals(other.getClass())) {
          return getClass().getName().compareTo(other.getClass().getName());
        }

        int lastComparison = 0;
- get_tables_result typedOther = (get_tables_result)other;
+ drop_table_with_environment_context_result typedOther = (drop_table_with_environment_context_result)other;

- lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(typedOther.isSetSuccess());
+ lastComparison = Boolean.valueOf(isSetO1()).compareTo(typedOther.isSetO1());
        if (lastComparison != 0) {
          return lastComparison;
        }
- if (isSetSuccess()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, typedOther.success);
+ if (isSetO1()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o1, typedOther.o1);
          if (lastComparison != 0) {
            return lastComparison;
          }
        }
- lastComparison = Boolean.valueOf(isSetO1()).compareTo(typedOther.isSetO1());
+ lastComparison = Boolean.valueOf(isSetO3()).compareTo(typedOther.isSetO3());
        if (lastComparison != 0) {
          return lastComparison;
        }
- if (isSetO1()) {
- lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o1, typedOther.o1);
+ if (isSetO3()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.o3, typedOther.o3);
          if (lastComparison != 0) {
            return lastComparison;
          }
@@ -23243,22 +23971,22 @@ public class ThriftHiveMetastore {

      @Override
      public String toString() {
- StringBuilder sb = new StringBuilder("get_tables_result(");
+ StringBuilder sb = new StringBuilder("drop_table_with_environment_context_result(");
        boolean first = true;

- sb.append("success:");
- if (this.success == null) {
+ sb.append("o1:");
+ if (this.o1 == null) {
          sb.append("null");
        } else {
- sb.append(this.success);
+ sb.append(this.o1);
        }
        first = false;
        if (!first) sb.append(", ");
- sb.append("o1:");
- if (this.o1 == null) {
+ sb.append("o3:");
+ if (this.o3 == null) {
          sb.append("null");
        } else {
- sb.append(this.o1);
+ sb.append(this.o3);
        }
        first = false;
        sb.append(")");
@@ -23286,15 +24014,15 @@ public class ThriftHiveMetastore {
        }
      }

- private static class get_tables_resultStandardSchemeFactory implements SchemeFactory {
- public get_tables_resultStandardScheme getScheme() {
- return new get_tables_resultStandardScheme();

[... 19368 lines stripped ...]
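
For readers skimming the generated diff above, the following is an illustrative sketch (not part of this commit) of how a raw Thrift client could invoke the new drop_table_with_environment_context call. The localhost:9083 endpoint, the example class name, and the property key are assumptions made purely for demonstration; only the method name and its dbname/name/deleteData/environment_context arguments are taken from the generated args struct shown above.

// Illustrative only -- a minimal sketch, not part of the committed code.
// Assumes a metastore Thrift server is listening on localhost:9083.
import java.util.HashMap;

import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

public class DropTableWithContextExample {
  public static void main(String[] args) throws Exception {
    TTransport transport = new TSocket("localhost", 9083); // assumed host/port
    transport.open();
    try {
      ThriftHiveMetastore.Client client =
          new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));

      // EnvironmentContext is a Thrift struct wrapping a map of string properties;
      // the key below is hypothetical and only illustrates passing extra context
      // (e.g. hints for metastore event listeners) along with the drop.
      EnvironmentContext ctx = new EnvironmentContext(new HashMap<String, String>());
      ctx.putToProperties("DO_NOT_UPDATE_STATS", "true");

      // Argument order mirrors the fields of drop_table_with_environment_context_args:
      // dbname (1), name (2), deleteData (3), environment_context (4).
      client.drop_table_with_environment_context("default", "tmp_table", true, ctx);
    } finally {
      transport.close();
    }
  }
}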
