FAQ
Modified: hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb?rev=1445309&r1=1445308&r2=1445309&view=diff
==============================================================================
--- hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb Tue Feb 12 18:52:55 2013
@@ -267,6 +267,22 @@ module ThriftHiveMetastore
        return
      end

+ def drop_table_with_environment_context(dbname, name, deleteData, environment_context)
+ send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
+ recv_drop_table_with_environment_context()
+ end
+
+ def send_drop_table_with_environment_context(dbname, name, deleteData, environment_context)
+ send_message('drop_table_with_environment_context', Drop_table_with_environment_context_args, :dbname => dbname, :name => name, :deleteData => deleteData, :environment_context => environment_context)
+ end
+
+ def recv_drop_table_with_environment_context()
+ result = receive_message(Drop_table_with_environment_context_result)
+ raise result.o1 unless result.o1.nil?
+ raise result.o3 unless result.o3.nil?
+ return
+ end
+
      def get_tables(db_name, pattern)
        send_get_tables(db_name, pattern)
        return recv_get_tables()
@@ -456,6 +472,24 @@ module ThriftHiveMetastore
        raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition failed: unknown result')
      end

+ def append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
+ send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
+ return recv_append_partition_with_environment_context()
+ end
+
+ def send_append_partition_with_environment_context(db_name, tbl_name, part_vals, environment_context)
+ send_message('append_partition_with_environment_context', Append_partition_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_vals => part_vals, :environment_context => environment_context)
+ end
+
+ def recv_append_partition_with_environment_context()
+ result = receive_message(Append_partition_with_environment_context_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise result.o3 unless result.o3.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_with_environment_context failed: unknown result')
+ end
+
      def append_partition_by_name(db_name, tbl_name, part_name)
        send_append_partition_by_name(db_name, tbl_name, part_name)
        return recv_append_partition_by_name()
@@ -474,6 +508,24 @@ module ThriftHiveMetastore
        raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_by_name failed: unknown result')
      end

+ def append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
+ send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
+ return recv_append_partition_by_name_with_environment_context()
+ end
+
+ def send_append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, environment_context)
+ send_message('append_partition_by_name_with_environment_context', Append_partition_by_name_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_name => part_name, :environment_context => environment_context)
+ end
+
+ def recv_append_partition_by_name_with_environment_context()
+ result = receive_message(Append_partition_by_name_with_environment_context_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise result.o3 unless result.o3.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'append_partition_by_name_with_environment_context failed: unknown result')
+ end
+
      def drop_partition(db_name, tbl_name, part_vals, deleteData)
        send_drop_partition(db_name, tbl_name, part_vals, deleteData)
        return recv_drop_partition()
@@ -491,6 +543,23 @@ module ThriftHiveMetastore
        raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition failed: unknown result')
      end

+ def drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
+ send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
+ return recv_drop_partition_with_environment_context()
+ end
+
+ def send_drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData, environment_context)
+ send_message('drop_partition_with_environment_context', Drop_partition_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_vals => part_vals, :deleteData => deleteData, :environment_context => environment_context)
+ end
+
+ def recv_drop_partition_with_environment_context()
+ result = receive_message(Drop_partition_with_environment_context_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_with_environment_context failed: unknown result')
+ end
+
      def drop_partition_by_name(db_name, tbl_name, part_name, deleteData)
        send_drop_partition_by_name(db_name, tbl_name, part_name, deleteData)
        return recv_drop_partition_by_name()
@@ -508,6 +577,23 @@ module ThriftHiveMetastore
        raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_by_name failed: unknown result')
      end

+ def drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
+ send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
+ return recv_drop_partition_by_name_with_environment_context()
+ end
+
+ def send_drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name, deleteData, environment_context)
+ send_message('drop_partition_by_name_with_environment_context', Drop_partition_by_name_with_environment_context_args, :db_name => db_name, :tbl_name => tbl_name, :part_name => part_name, :deleteData => deleteData, :environment_context => environment_context)
+ end
+
+ def recv_drop_partition_by_name_with_environment_context()
+ result = receive_message(Drop_partition_by_name_with_environment_context_result)
+ return result.success unless result.success.nil?
+ raise result.o1 unless result.o1.nil?
+ raise result.o2 unless result.o2.nil?
+ raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'drop_partition_by_name_with_environment_context failed: unknown result')
+ end
+
      def get_partition(db_name, tbl_name, part_vals)
        send_get_partition(db_name, tbl_name, part_vals)
        return recv_get_partition()
@@ -1497,6 +1583,19 @@ module ThriftHiveMetastore
        write_result(result, oprot, 'drop_table', seqid)
      end

+ def process_drop_table_with_environment_context(seqid, iprot, oprot)
+ args = read_args(iprot, Drop_table_with_environment_context_args)
+ result = Drop_table_with_environment_context_result.new()
+ begin
+ @handler.drop_table_with_environment_context(args.dbname, args.name, args.deleteData, args.environment_context)
+ rescue ::NoSuchObjectException => o1
+ result.o1 = o1
+ rescue ::MetaException => o3
+ result.o3 = o3
+ end
+ write_result(result, oprot, 'drop_table_with_environment_context', seqid)
+ end
+
      def process_get_tables(seqid, iprot, oprot)
        args = read_args(iprot, Get_tables_args)
        result = Get_tables_result.new()
@@ -1648,6 +1747,21 @@ module ThriftHiveMetastore
        write_result(result, oprot, 'append_partition', seqid)
      end

+ def process_append_partition_with_environment_context(seqid, iprot, oprot)
+ args = read_args(iprot, Append_partition_with_environment_context_args)
+ result = Append_partition_with_environment_context_result.new()
+ begin
+ result.success = @handler.append_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.environment_context)
+ rescue ::InvalidObjectException => o1
+ result.o1 = o1
+ rescue ::AlreadyExistsException => o2
+ result.o2 = o2
+ rescue ::MetaException => o3
+ result.o3 = o3
+ end
+ write_result(result, oprot, 'append_partition_with_environment_context', seqid)
+ end
+
      def process_append_partition_by_name(seqid, iprot, oprot)
        args = read_args(iprot, Append_partition_by_name_args)
        result = Append_partition_by_name_result.new()
@@ -1663,6 +1777,21 @@ module ThriftHiveMetastore
        write_result(result, oprot, 'append_partition_by_name', seqid)
      end

+ def process_append_partition_by_name_with_environment_context(seqid, iprot, oprot)
+ args = read_args(iprot, Append_partition_by_name_with_environment_context_args)
+ result = Append_partition_by_name_with_environment_context_result.new()
+ begin
+ result.success = @handler.append_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.environment_context)
+ rescue ::InvalidObjectException => o1
+ result.o1 = o1
+ rescue ::AlreadyExistsException => o2
+ result.o2 = o2
+ rescue ::MetaException => o3
+ result.o3 = o3
+ end
+ write_result(result, oprot, 'append_partition_by_name_with_environment_context', seqid)
+ end
+
      def process_drop_partition(seqid, iprot, oprot)
        args = read_args(iprot, Drop_partition_args)
        result = Drop_partition_result.new()
@@ -1676,6 +1805,19 @@ module ThriftHiveMetastore
        write_result(result, oprot, 'drop_partition', seqid)
      end

+ def process_drop_partition_with_environment_context(seqid, iprot, oprot)
+ args = read_args(iprot, Drop_partition_with_environment_context_args)
+ result = Drop_partition_with_environment_context_result.new()
+ begin
+ result.success = @handler.drop_partition_with_environment_context(args.db_name, args.tbl_name, args.part_vals, args.deleteData, args.environment_context)
+ rescue ::NoSuchObjectException => o1
+ result.o1 = o1
+ rescue ::MetaException => o2
+ result.o2 = o2
+ end
+ write_result(result, oprot, 'drop_partition_with_environment_context', seqid)
+ end
+
      def process_drop_partition_by_name(seqid, iprot, oprot)
        args = read_args(iprot, Drop_partition_by_name_args)
        result = Drop_partition_by_name_result.new()
@@ -1689,6 +1831,19 @@ module ThriftHiveMetastore
        write_result(result, oprot, 'drop_partition_by_name', seqid)
      end

+ def process_drop_partition_by_name_with_environment_context(seqid, iprot, oprot)
+ args = read_args(iprot, Drop_partition_by_name_with_environment_context_args)
+ result = Drop_partition_by_name_with_environment_context_result.new()
+ begin
+ result.success = @handler.drop_partition_by_name_with_environment_context(args.db_name, args.tbl_name, args.part_name, args.deleteData, args.environment_context)
+ rescue ::NoSuchObjectException => o1
+ result.o1 = o1
+ rescue ::MetaException => o2
+ result.o2 = o2
+ end
+ write_result(result, oprot, 'drop_partition_by_name_with_environment_context', seqid)
+ end
+
      def process_get_partition(seqid, iprot, oprot)
        args = read_args(iprot, Get_partition_args)
        result = Get_partition_result.new()
@@ -2850,6 +3005,46 @@ module ThriftHiveMetastore
      ::Thrift::Struct.generate_accessors self
    end

+ class Drop_table_with_environment_context_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DBNAME = 1
+ NAME = 2
+ DELETEDATA = 3
+ ENVIRONMENT_CONTEXT = 4
+
+ FIELDS = {
+ DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbname'},
+ NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
+ DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
+ ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Drop_table_with_environment_context_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ O1 = 1
+ O3 = 2
+
+ FIELDS = {
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
    class Get_tables_args
      include ::Thrift::Struct, ::Thrift::Struct_Union
      DB_NAME = 1
@@ -3276,6 +3471,50 @@ module ThriftHiveMetastore
      ::Thrift::Struct.generate_accessors self
    end

+ class Append_partition_with_environment_context_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DB_NAME = 1
+ TBL_NAME = 2
+ PART_VALS = 3
+ ENVIRONMENT_CONTEXT = 4
+
+ FIELDS = {
+ DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+ TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
+ PART_VALS => {:type => ::Thrift::Types::LIST, :name => 'part_vals', :element => {:type => ::Thrift::Types::STRING}},
+ ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Append_partition_with_environment_context_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+ O3 = 3
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::Partition},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::InvalidObjectException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::AlreadyExistsException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
    class Append_partition_by_name_args
      include ::Thrift::Struct, ::Thrift::Struct_Union
      DB_NAME = 1
@@ -3318,6 +3557,50 @@ module ThriftHiveMetastore
      ::Thrift::Struct.generate_accessors self
    end

+ class Append_partition_by_name_with_environment_context_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DB_NAME = 1
+ TBL_NAME = 2
+ PART_NAME = 3
+ ENVIRONMENT_CONTEXT = 4
+
+ FIELDS = {
+ DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+ TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
+ PART_NAME => {:type => ::Thrift::Types::STRING, :name => 'part_name'},
+ ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Append_partition_by_name_with_environment_context_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+ O3 = 3
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::Partition},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::InvalidObjectException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::AlreadyExistsException},
+ O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
    class Drop_partition_args
      include ::Thrift::Struct, ::Thrift::Struct_Union
      DB_NAME = 1
@@ -3360,6 +3643,50 @@ module ThriftHiveMetastore
      ::Thrift::Struct.generate_accessors self
    end

+ class Drop_partition_with_environment_context_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DB_NAME = 1
+ TBL_NAME = 2
+ PART_VALS = 3
+ DELETEDATA = 4
+ ENVIRONMENT_CONTEXT = 5
+
+ FIELDS = {
+ DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+ TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
+ PART_VALS => {:type => ::Thrift::Types::LIST, :name => 'part_vals', :element => {:type => ::Thrift::Types::STRING}},
+ DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
+ ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Drop_partition_with_environment_context_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
    class Drop_partition_by_name_args
      include ::Thrift::Struct, ::Thrift::Struct_Union
      DB_NAME = 1
@@ -3402,6 +3729,50 @@ module ThriftHiveMetastore
      ::Thrift::Struct.generate_accessors self
    end

+ class Drop_partition_by_name_with_environment_context_args
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ DB_NAME = 1
+ TBL_NAME = 2
+ PART_NAME = 3
+ DELETEDATA = 4
+ ENVIRONMENT_CONTEXT = 5
+
+ FIELDS = {
+ DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
+ TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
+ PART_NAME => {:type => ::Thrift::Types::STRING, :name => 'part_name'},
+ DELETEDATA => {:type => ::Thrift::Types::BOOL, :name => 'deleteData'},
+ ENVIRONMENT_CONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environment_context', :class => ::EnvironmentContext}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
+ class Drop_partition_by_name_with_environment_context_result
+ include ::Thrift::Struct, ::Thrift::Struct_Union
+ SUCCESS = 0
+ O1 = 1
+ O2 = 2
+
+ FIELDS = {
+ SUCCESS => {:type => ::Thrift::Types::BOOL, :name => 'success'},
+ O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+ O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+ }
+
+ def struct_fields; FIELDS; end
+
+ def validate
+ end
+
+ ::Thrift::Struct.generate_accessors self
+ end
+
    class Get_partition_args
      include ::Thrift::Struct, ::Thrift::Struct_Union
      DB_NAME = 1

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1445309&r1=1445308&r2=1445309&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Tue Feb 12 18:52:55 2013
@@ -1080,19 +1080,13 @@ public class HiveMetaStore extends Thrif
      @Override
      public void create_table(final Table tbl) throws AlreadyExistsException,
          MetaException, InvalidObjectException {
- create_table(tbl, null);
+ create_table_with_environment_context(tbl, null);
      }

      @Override
- public void create_table_with_environment_context(final Table table,
+ public void create_table_with_environment_context(final Table tbl,
          final EnvironmentContext envContext)
          throws AlreadyExistsException, MetaException, InvalidObjectException {
- create_table(table, envContext);
- }
-
- private void create_table(final Table tbl,
- final EnvironmentContext envContext) throws AlreadyExistsException,
- MetaException, InvalidObjectException {
        startFunction("create_table", ": " + tbl.toString());
        boolean success = false;
        Exception ex = null;
@@ -1126,8 +1120,9 @@ public class HiveMetaStore extends Thrif
      }

      private void drop_table_core(final RawStore ms, final String dbname, final String name,
- final boolean deleteData) throws NoSuchObjectException, MetaException, IOException,
- InvalidObjectException, InvalidInputException {
+ final boolean deleteData, final EnvironmentContext envContext)
+ throws NoSuchObjectException, MetaException, IOException,
+ InvalidObjectException, InvalidInputException {
        boolean success = false;
        boolean isExternal = false;
        Path tblPath = null;
@@ -1196,7 +1191,9 @@ public class HiveMetaStore extends Thrif
            // ok even if the data is not deleted
          }
          for (MetaStoreEventListener listener : listeners) {
- listener.onDropTable(new DropTableEvent(tbl, success, this));
+ DropTableEvent dropTableEvent = new DropTableEvent(tbl, success, this);
+ dropTableEvent.setEnvironmentContext(envContext);
+ listener.onDropTable(dropTableEvent);
          }
        }
      }
@@ -1300,14 +1297,22 @@ public class HiveMetaStore extends Thrif
        return partPaths;
      }

+ @Override
      public void drop_table(final String dbname, final String name, final boolean deleteData)
          throws NoSuchObjectException, MetaException {
+ drop_table_with_environment_context(dbname, name, deleteData, null);
+ }
+
+ @Override
+ public void drop_table_with_environment_context(final String dbname, final String name,
+ final boolean deleteData, final EnvironmentContext envContext)
+ throws NoSuchObjectException, MetaException {
        startTableFunction("drop_table", dbname, name);

        boolean success = false;
        Exception ex = null;
        try {
- drop_table_core(getMS(), dbname, name, deleteData);
+ drop_table_core(getMS(), dbname, name, deleteData, envContext);
          success = true;
        } catch (IOException e) {
          ex = e;
@@ -1464,8 +1469,8 @@ public class HiveMetaStore extends Thrif
      }

      private Partition append_partition_common(RawStore ms, String dbName, String tableName,
- List<String> part_vals) throws InvalidObjectException, AlreadyExistsException,
- MetaException {
+ List<String> part_vals, EnvironmentContext envContext) throws InvalidObjectException,
+ AlreadyExistsException, MetaException {

        Partition part = new Partition();
        boolean success = false, madeDir = false;
@@ -1535,6 +1540,7 @@ public class HiveMetaStore extends Thrif
          for (MetaStoreEventListener listener : listeners) {
            AddPartitionEvent addPartitionEvent =
                new AddPartitionEvent(tbl, part, success, this);
+ addPartitionEvent.setEnvironmentContext(envContext);
            listener.onAddPartition(addPartitionEvent);
          }
        }
@@ -1553,9 +1559,17 @@ public class HiveMetaStore extends Thrif
        }
      }

+ @Override
      public Partition append_partition(final String dbName, final String tableName,
          final List<String> part_vals) throws InvalidObjectException,
          AlreadyExistsException, MetaException {
+ return append_partition_with_environment_context(dbName, tableName, part_vals, null);
+ }
+
+ @Override
+ public Partition append_partition_with_environment_context(final String dbName,
+ final String tableName, final List<String> part_vals, final EnvironmentContext envContext)
+ throws InvalidObjectException, AlreadyExistsException, MetaException {
        startPartitionFunction("append_partition", dbName, tableName, part_vals);
        if (LOG.isDebugEnabled()) {
          for (String part : part_vals) {
@@ -1566,7 +1580,7 @@ public class HiveMetaStore extends Thrif
        Partition ret = null;
        Exception ex = null;
        try {
- ret = append_partition_common(getMS(), dbName, tableName, part_vals);
+ ret = append_partition_common(getMS(), dbName, tableName, part_vals, envContext);
        } catch (Exception e) {
          ex = e;
          if (e instanceof MetaException) {
@@ -1793,7 +1807,7 @@ public class HiveMetaStore extends Thrif
      @Override
      public Partition add_partition(final Partition part)
          throws InvalidObjectException, AlreadyExistsException, MetaException {
- return add_partition(part, null);
+ return add_partition_with_environment_context(part, null);
      }

      @Override
@@ -1801,12 +1815,6 @@ public class HiveMetaStore extends Thrif
          final Partition part, EnvironmentContext envContext)
          throws InvalidObjectException, AlreadyExistsException,
          MetaException {
- return add_partition(part, envContext);
- }
-
- private Partition add_partition(final Partition part,
- final EnvironmentContext envContext) throws InvalidObjectException,
- AlreadyExistsException, MetaException {
        startTableFunction("add_partition",
            part.getDbName(), part.getTableName());
        Partition ret = null;
@@ -1833,7 +1841,7 @@ public class HiveMetaStore extends Thrif
      }

      private boolean drop_partition_common(RawStore ms, String db_name, String tbl_name,
- List<String> part_vals, final boolean deleteData)
+ List<String> part_vals, final boolean deleteData, final EnvironmentContext envContext)
        throws MetaException, NoSuchObjectException, IOException, InvalidObjectException,
        InvalidInputException {
        boolean success = false;
@@ -1894,22 +1902,34 @@ public class HiveMetaStore extends Thrif
            }
          }
          for (MetaStoreEventListener listener : listeners) {
- listener.onDropPartition(new DropPartitionEvent(tbl, part, success, this));
+ DropPartitionEvent dropPartitionEvent = new DropPartitionEvent(tbl, part, success, this);
+ dropPartitionEvent.setEnvironmentContext(envContext);
+ listener.onDropPartition(dropPartitionEvent);
          }
        }
        return true;
      }

+ @Override
      public boolean drop_partition(final String db_name, final String tbl_name,
          final List<String> part_vals, final boolean deleteData)
          throws NoSuchObjectException, MetaException, TException {
+ return drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData,
+ null);
+ }
+
+ @Override
+ public boolean drop_partition_with_environment_context(final String db_name,
+ final String tbl_name, final List<String> part_vals, final boolean deleteData,
+ final EnvironmentContext envContext)
+ throws NoSuchObjectException, MetaException, TException {
        startPartitionFunction("drop_partition", db_name, tbl_name, part_vals);
        LOG.info("Partition values:" + part_vals);

        boolean ret = false;
        Exception ex = null;
        try {
- ret = drop_partition_common(getMS(), db_name, tbl_name, part_vals, deleteData);
+ ret = drop_partition_common(getMS(), db_name, tbl_name, part_vals, deleteData, envContext);
        } catch (IOException e) {
          ex = e;
          throw new MetaException(e.getMessage());
@@ -2274,7 +2294,7 @@ public class HiveMetaStore extends Thrif
          final Table newTable)
          throws InvalidOperationException, MetaException {
        // Do not set an environment context.
- alter_table(dbname, name, newTable, null);
+ alter_table_with_environment_context(dbname, name, newTable, null);
      }

      @Override
@@ -2282,12 +2302,6 @@ public class HiveMetaStore extends Thrif
          final String name, final Table newTable,
          final EnvironmentContext envContext)
          throws InvalidOperationException, MetaException {
- alter_table(dbname, name, newTable, envContext);
- }
-
- private void alter_table(final String dbname, final String name,
- final Table newTable, final EnvironmentContext envContext)
- throws InvalidOperationException, MetaException {
        startFunction("alter_table", ": db=" + dbname + " tbl=" + name
            + " newtbl=" + newTable.getTableName());

@@ -2609,9 +2623,17 @@ public class HiveMetaStore extends Thrif
        return ret;
      }

+ @Override
      public Partition append_partition_by_name(final String db_name, final String tbl_name,
          final String part_name) throws InvalidObjectException,
          AlreadyExistsException, MetaException, TException {
+ return append_partition_by_name_with_environment_context(db_name, tbl_name, part_name, null);
+ }
+
+ @Override
+ public Partition append_partition_by_name_with_environment_context(final String db_name,
+ final String tbl_name, final String part_name, final EnvironmentContext env_context)
+ throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
        startFunction("append_partition_by_name", ": db=" + db_name + " tbl="
            + tbl_name + " part=" + part_name);

@@ -2620,7 +2642,7 @@ public class HiveMetaStore extends Thrif
        try {
          RawStore ms = getMS();
          List<String> partVals = getPartValsFromName(ms, db_name, tbl_name, part_name);
- ret = append_partition_common(ms, db_name, tbl_name, partVals);
+ ret = append_partition_common(ms, db_name, tbl_name, partVals, env_context);
        } catch (Exception e) {
          ex = e;
          if (e instanceof InvalidObjectException) {
@@ -2642,10 +2664,10 @@ public class HiveMetaStore extends Thrif
        return ret;
      }

- private boolean drop_partition_by_name_core(final RawStore ms,
- final String db_name, final String tbl_name, final String part_name,
- final boolean deleteData) throws NoSuchObjectException,
- MetaException, TException, IOException, InvalidObjectException, InvalidInputException {
+ private boolean drop_partition_by_name_core(final RawStore ms, final String db_name,
+ final String tbl_name, final String part_name, final boolean deleteData,
+ final EnvironmentContext envContext) throws NoSuchObjectException, MetaException,
+ TException, IOException, InvalidObjectException, InvalidInputException {

        List<String> partVals = null;
        try {
@@ -2654,13 +2676,22 @@ public class HiveMetaStore extends Thrif
          throw new NoSuchObjectException(e.getMessage());
        }

- return drop_partition_common(ms, db_name, tbl_name, partVals, deleteData);
+ return drop_partition_common(ms, db_name, tbl_name, partVals, deleteData, envContext);
      }

      @Override
      public boolean drop_partition_by_name(final String db_name, final String tbl_name,
          final String part_name, final boolean deleteData) throws NoSuchObjectException,
          MetaException, TException {
+ return drop_partition_by_name_with_environment_context(db_name, tbl_name, part_name,
+ deleteData, null);
+ }
+
+ @Override
+ public boolean drop_partition_by_name_with_environment_context(final String db_name,
+ final String tbl_name, final String part_name, final boolean deleteData,
+ final EnvironmentContext envContext) throws NoSuchObjectException,
+ MetaException, TException {
        startFunction("drop_partition_by_name", ": db=" + db_name + " tbl="
            + tbl_name + " part=" + part_name);

@@ -2668,7 +2699,7 @@ public class HiveMetaStore extends Thrif
        Exception ex = null;
        try {
          ret = drop_partition_by_name_core(getMS(), db_name, tbl_name,
- part_name, deleteData);
+ part_name, deleteData, envContext);
        } catch (IOException e) {
          ex = e;
          throw new MetaException(e.getMessage());

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1445309&r1=1445308&r2=1445309&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Tue Feb 12 18:52:55 2013
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.metastore.
  import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
  import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
  import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
  import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
@@ -206,7 +207,12 @@ public class HiveMetaStoreClient impleme
     */
    public void alter_table(String dbname, String tbl_name, Table new_tbl)
        throws InvalidOperationException, MetaException, TException {
- client.alter_table(dbname, tbl_name, new_tbl);
+ alter_table(dbname, tbl_name, new_tbl, null);
+ }
+
+ public void alter_table(String dbname, String tbl_name, Table new_tbl,
+ EnvironmentContext envContext) throws InvalidOperationException, MetaException, TException {
+ client.alter_table_with_environment_context(dbname, tbl_name, new_tbl, envContext);
    }

    /**
@@ -358,7 +364,13 @@ public class HiveMetaStoreClient impleme
    public Partition add_partition(Partition new_part)
        throws InvalidObjectException, AlreadyExistsException, MetaException,
        TException {
- return deepCopy(client.add_partition(new_part));
+ return add_partition(new_part, null);
+ }
+
+ public Partition add_partition(Partition new_part, EnvironmentContext envContext)
+ throws InvalidObjectException, AlreadyExistsException, MetaException,
+ TException {
+ return deepCopy(client.add_partition_with_environment_context(new_part, envContext));
    }

    /**
@@ -390,14 +402,26 @@ public class HiveMetaStoreClient impleme
    public Partition appendPartition(String db_name, String table_name,
        List<String> part_vals) throws InvalidObjectException,
        AlreadyExistsException, MetaException, TException {
- return deepCopy(client.append_partition(db_name, table_name, part_vals));
+ return appendPartition(db_name, table_name, part_vals, null);
+ }
+
+ public Partition appendPartition(String db_name, String table_name, List<String> part_vals,
+ EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
+ MetaException, TException {
+ return deepCopy(client.append_partition_with_environment_context(db_name, table_name,
+ part_vals, envContext));
    }

    public Partition appendPartition(String dbName, String tableName, String partName)
- throws InvalidObjectException, AlreadyExistsException,
- MetaException, TException {
- return deepCopy(
- client.append_partition_by_name(dbName, tableName, partName));
+ throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
+ return appendPartition(dbName, tableName, partName, null);
+ }
+
+ public Partition appendPartition(String dbName, String tableName, String partName,
+ EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
+ MetaException, TException {
+ return deepCopy(client.append_partition_by_name_with_environment_context(dbName, tableName,
+ partName, envContext));
    }

    /**
@@ -423,13 +447,18 @@ public class HiveMetaStoreClient impleme
     */
    public void createTable(Table tbl) throws AlreadyExistsException,
        InvalidObjectException, MetaException, NoSuchObjectException, TException {
+ createTable(tbl, null);
+ }
+
+ public void createTable(Table tbl, EnvironmentContext envContext) throws AlreadyExistsException,
+ InvalidObjectException, MetaException, NoSuchObjectException, TException {
      HiveMetaHook hook = getHook(tbl);
      if (hook != null) {
        hook.preCreateTable(tbl);
      }
      boolean success = false;
      try {
- client.create_table(tbl);
+ client.create_table_with_environment_context(tbl, envContext);
        if (hook != null) {
          hook.commitCreateTable(tbl);
        }
@@ -512,13 +541,25 @@ public class HiveMetaStoreClient impleme
    public boolean dropPartition(String db_name, String tbl_name,
        List<String> part_vals) throws NoSuchObjectException, MetaException,
        TException {
- return dropPartition(db_name, tbl_name, part_vals, true);
+ return dropPartition(db_name, tbl_name, part_vals, true, null);
+ }
+
+ public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
+ EnvironmentContext env_context) throws NoSuchObjectException, MetaException, TException {
+ return dropPartition(db_name, tbl_name, part_vals, true, env_context);
    }

    public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData)
        throws NoSuchObjectException, MetaException, TException {
- return client.drop_partition_by_name(dbName, tableName, partName, deleteData);
+ return dropPartition(dbName, tableName, partName, deleteData, null);
+ }
+
+ public boolean dropPartition(String dbName, String tableName, String partName, boolean deleteData,
+ EnvironmentContext envContext) throws NoSuchObjectException, MetaException, TException {
+ return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName,
+ deleteData, envContext);
    }
+
    /**
     * @param db_name
     * @param tbl_name
@@ -535,7 +576,14 @@ public class HiveMetaStoreClient impleme
    public boolean dropPartition(String db_name, String tbl_name,
        List<String> part_vals, boolean deleteData) throws NoSuchObjectException,
        MetaException, TException {
- return client.drop_partition(db_name, tbl_name, part_vals, deleteData);
+ return dropPartition(db_name, tbl_name, part_vals, deleteData, null);
+ }
+
+ public boolean dropPartition(String db_name, String tbl_name, List<String> part_vals,
+ boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
+ MetaException, TException {
+ return client.drop_partition_with_environment_context(db_name, tbl_name, part_vals, deleteData,
+ envContext);
    }

    /**
@@ -550,14 +598,14 @@ public class HiveMetaStoreClient impleme
     */
    public void dropTable(String dbname, String name)
        throws NoSuchObjectException, MetaException, TException {
- dropTable(dbname, name, true, true);
+ dropTable(dbname, name, true, true, null);
    }

    /** {@inheritDoc} */
    @Deprecated
    public void dropTable(String tableName, boolean deleteData)
        throws MetaException, UnknownTableException, TException, NoSuchObjectException {
- dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false);
+ dropTable(DEFAULT_DATABASE_NAME, tableName, deleteData, false, null);
    }

    /**
@@ -573,14 +621,19 @@ public class HiveMetaStoreClient impleme
     * java.lang.String, boolean)
     */
    public void dropTable(String dbname, String name, boolean deleteData,
- boolean ignoreUknownTab) throws MetaException, TException,
+ boolean ignoreUnknownTab) throws MetaException, TException,
        NoSuchObjectException, UnsupportedOperationException {
+ dropTable(dbname, name, deleteData, ignoreUnknownTab, null);
+ }

+ public void dropTable(String dbname, String name, boolean deleteData,
+ boolean ignoreUnknownTab, EnvironmentContext envContext) throws MetaException, TException,
+ NoSuchObjectException, UnsupportedOperationException {
      Table tbl;
      try {
        tbl = getTable(dbname, name);
      } catch (NoSuchObjectException e) {
- if (!ignoreUknownTab) {
+ if (!ignoreUnknownTab) {
          throw e;
        }
        return;
@@ -594,13 +647,13 @@ public class HiveMetaStoreClient impleme
      }
      boolean success = false;
      try {
- client.drop_table(dbname, name, deleteData);
+ client.drop_table_with_environment_context(dbname, name, deleteData, envContext);
        if (hook != null) {
          hook.commitDropTable(tbl, deleteData);
        }
        success=true;
      } catch (NoSuchObjectException e) {
- if (!ignoreUknownTab) {
+ if (!ignoreUnknownTab) {
          throw e;
        }
      } finally {
@@ -1038,13 +1091,26 @@ public class HiveMetaStoreClient impleme

    public Partition appendPartitionByName(String dbName, String tableName, String partName)
        throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
- return deepCopy(
- client.append_partition_by_name(dbName, tableName, partName));
+ return appendPartitionByName(dbName, tableName, partName, null);
    }

- public boolean dropPartitionByName(String dbName, String tableName, String partName, boolean deleteData)
- throws NoSuchObjectException, MetaException, TException {
- return client.drop_partition_by_name(dbName, tableName, partName, deleteData);
+ public Partition appendPartitionByName(String dbName, String tableName, String partName,
+ EnvironmentContext envContext) throws InvalidObjectException, AlreadyExistsException,
+ MetaException, TException {
+ return deepCopy(client.append_partition_by_name_with_environment_context(dbName, tableName,
+ partName, envContext));
+ }
+
+ public boolean dropPartitionByName(String dbName, String tableName, String partName,
+ boolean deleteData) throws NoSuchObjectException, MetaException, TException {
+ return dropPartitionByName(dbName, tableName, partName, deleteData, null);
+ }
+
+ public boolean dropPartitionByName(String dbName, String tableName, String partName,
+ boolean deleteData, EnvironmentContext envContext) throws NoSuchObjectException,
+ MetaException, TException {
+ return client.drop_partition_by_name_with_environment_context(dbName, tableName, partName,
+ deleteData, envContext);
    }

    private HiveMetaHook getHook(Table tbl) throws MetaException {

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java?rev=1445309&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreWithEnvironmentContext.java Tue Feb 12 18:52:55 2013
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropTableEvent;
+import org.apache.hadoop.hive.metastore.events.ListenerEvent;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.mortbay.log.Log;
+
+/**
+ * TestHiveMetaStoreWithEnvironmentContext. Test case for _with_environment_context
+ * calls in {@link org.apache.hadoop.hive.metastore.HiveMetaStore}
+ */
+public class TestHiveMetaStoreWithEnvironmentContext extends TestCase {
+
+ private HiveConf hiveConf;
+ private HiveMetaStoreClient msc;
+ private EnvironmentContext envContext;
+ private final Database db = new Database();
+ private Table table = new Table();
+ private final Partition partition = new Partition();
+
+ private static final String dbName = "tmpdb";
+ private static final String tblName = "tmptbl";
+ private static final String renamed = "tmptbl2";
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+
+ System.setProperty("hive.metastore.event.listeners",
+ DummyListener.class.getName());
+
+ int port = MetaStoreUtils.findFreePort();
+ MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+ hiveConf = new HiveConf(this.getClass());
+ hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+ hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+ hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+ hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+ SessionState.start(new CliSessionState(hiveConf));
+ msc = new HiveMetaStoreClient(hiveConf, null);
+
+ msc.dropDatabase(dbName, true, true);
+
+ Map<String, String> envProperties = new HashMap<String, String>();
+ envProperties.put("hadoop.job.ugi", "test_user");
+ envContext = new EnvironmentContext(envProperties);
+
+ db.setName(dbName);
+
+ Map<String, String> tableParams = new HashMap<String, String>();
+ tableParams.put("a", "string");
+ List<FieldSchema> partitionKeys = new ArrayList<FieldSchema>();
+ partitionKeys.add(new FieldSchema("b", "string", ""));
+
+ List<FieldSchema> cols = new ArrayList<FieldSchema>();
+ cols.add(new FieldSchema("a", "string", ""));
+ cols.add(new FieldSchema("b", "string", ""));
+ StorageDescriptor sd = new StorageDescriptor();
+ sd.setCols(cols);
+ sd.setCompressed(false);
+ sd.setParameters(tableParams);
+ sd.setSerdeInfo(new SerDeInfo());
+ sd.getSerdeInfo().setName(tblName);
+ sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+ sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
+
+ table.setDbName(dbName);
+ table.setTableName(tblName);
+ table.setParameters(tableParams);
+ table.setPartitionKeys(partitionKeys);
+ table.setSd(sd);
+
+ List<String> partValues = new ArrayList<String>();
+ partValues.add("2011");
+ partition.setDbName(dbName);
+ partition.setTableName(tblName);
+ partition.setValues(partValues);
+ partition.setSd(table.getSd().deepCopy());
+ partition.getSd().setSerdeInfo(table.getSd().getSerdeInfo().deepCopy());
+
+ DummyListener.notifyList.clear();
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ super.tearDown();
+ }
+
+ public void testEnvironmentContext() throws Exception {
+ int listSize = 0;
+
+ List<ListenerEvent> notifyList = DummyListener.notifyList;
+ assertEquals(notifyList.size(), listSize);
+ msc.createDatabase(db);
+ listSize++;
+ assertEquals(listSize, notifyList.size());
+ CreateDatabaseEvent dbEvent = (CreateDatabaseEvent)(notifyList.get(listSize - 1));
+ assert dbEvent.getStatus();
+
+ Log.debug("Creating table");
+ msc.createTable(table, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ CreateTableEvent tblEvent = (CreateTableEvent)(notifyList.get(listSize - 1));
+ assert tblEvent.getStatus();
+ assertEquals(envContext, tblEvent.getEnvironmentContext());
+
+ table = msc.getTable(dbName, tblName);
+
+ Log.debug("Adding partition");
+ partition.getSd().setLocation(table.getSd().getLocation() + "/part1");
+ msc.add_partition(partition, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ AddPartitionEvent partEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
+ assert partEvent.getStatus();
+ assertEquals(envContext, partEvent.getEnvironmentContext());
+
+ Log.debug("Appending partition");
+ List<String> partVals = new ArrayList<String>();
+ partVals.add("2012");
+ msc.appendPartition(dbName, tblName, partVals, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ AddPartitionEvent appendPartEvent = (AddPartitionEvent)(notifyList.get(listSize-1));
+ assert appendPartEvent.getStatus();
+ assertEquals(envContext, appendPartEvent.getEnvironmentContext());
+
+ Log.debug("Renaming table");
+ table.setTableName(renamed);
+ msc.alter_table(dbName, tblName, table, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ AlterTableEvent alterTableEvent = (AlterTableEvent) notifyList.get(listSize-1);
+ assert alterTableEvent.getStatus();
+ assertEquals(envContext, alterTableEvent.getEnvironmentContext());
+
+ Log.debug("Renaming table back");
+ table.setTableName(tblName);
+ msc.alter_table(dbName, renamed, table, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+
+ Log.debug("Dropping partition");
+ List<String> dropPartVals = new ArrayList<String>();
+ dropPartVals.add("2011");
+ msc.dropPartition(dbName, tblName, dropPartVals, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ DropPartitionEvent dropPartEvent = (DropPartitionEvent)notifyList.get(listSize - 1);
+ assert dropPartEvent.getStatus();
+ assertEquals(envContext, dropPartEvent.getEnvironmentContext());
+
+ Log.debug("Dropping partition by name");
+ msc.dropPartition(dbName, tblName, "b=2012", true, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ DropPartitionEvent dropPartByNameEvent = (DropPartitionEvent)notifyList.get(listSize - 1);
+ assert dropPartByNameEvent.getStatus();
+ assertEquals(envContext, dropPartByNameEvent.getEnvironmentContext());
+
+ Log.debug("Dropping table");
+ msc.dropTable(dbName, tblName, true, false, envContext);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+ DropTableEvent dropTblEvent = (DropTableEvent)notifyList.get(listSize-1);
+ assert dropTblEvent.getStatus();
+ assertEquals(envContext, dropTblEvent.getEnvironmentContext());
+
+ msc.dropDatabase(dbName);
+ listSize++;
+ assertEquals(notifyList.size(), listSize);
+
+ DropDatabaseEvent dropDB = (DropDatabaseEvent)notifyList.get(listSize-1);
+ assert dropDB.getStatus();
+ }
+
+}

Search Discussions

Discussion Posts

Previous

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 8 of 8 | next ›
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Feb 12, '13 at 6:53p
active: Feb 12, '13 at 6:53p
posts: 8
users: 1
website: hive.apache.org

1 user in discussion

Kevinwilfong: 8 posts

People

Translate

site design / logo © 2021 Grokbase