Author: thejas
Date: Fri Dec 13 00:12:01 2013
New Revision: 1550601

URL: http://svn.apache.org/r1550601
Log:
HIVE-2093 : create/drop database should populate inputs/outputs and check concurrency and user permission (Navis via Thejas Nair)
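
The end-to-end behavior this patch enables can be sketched as follows. This is a minimal illustration rather than part of the commit: it assumes a Driver wired to an authorization provider, and the user and database names are made up. The grant/revoke statements mirror the new allowCreateDatabase/disallowCreateDatabase test helpers further down.

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class CreateDropDbSketch {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();     // assumes hive-site.xml on the classpath
        SessionState.start(conf);
        Driver driver = new Driver(conf);

        // CREATE DATABASE is now authorized as HiveOperation.CREATEDATABASE,
        // which requires the CREATE privilege.
        driver.run("grant create to user hive_test_user");
        CommandProcessorResponse ret = driver.run("create database testdb");
        assert ret.getResponseCode() == 0;

        // DROP DATABASE requires the DROP privilege and now records the
        // database as both a ReadEntity and a WriteEntity, which is what
        // shows up as "database:testdb" in the PREHOOK/POSTHOOK lines below.
        driver.run("drop database testdb");
      }
    }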

Added:
     hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_create_db.q
     hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_drop_db.q
     hive/trunk/ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q
     hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q
     hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q
     hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_create_db.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg_query_tbl_in_locked_db.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg_try_db_lock_conflict.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg_try_drop_locked_db.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg_try_lock_db_in_use.q.out
Modified:
     hive/trunk/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
     hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
     hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
     hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_2.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_6.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
     hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
     hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out
     hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
     hive/trunk/ql/src/test/results/clientnegative/exim_22_export_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/join_nonexistent_part.q.out
     hive/trunk/ql/src/test/results/clientnegative/load_exist_part_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/load_nonpart_authfail.q.out
     hive/trunk/ql/src/test/results/clientnegative/load_part_authfail.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter1.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter2.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter4.q.out
     hive/trunk/ql/src/test/results/clientpositive/authorization_5.q.out
     hive/trunk/ql/src/test/results/clientpositive/database.q.out
     hive/trunk/ql/src/test/results/clientpositive/database_drop.q.out
     hive/trunk/ql/src/test/results/clientpositive/describe_database_json.q.out
     hive/trunk/ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_00_nonpart_empty.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_01_nonpart.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_02_00_part_empty.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_02_part.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_03_nonpart_over_compat.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_04_all_part.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_05_some_part.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_06_one_part.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_07_all_part_over_nonoverlap.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_08_nonpart_rename.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_09_part_spec_nonoverlap.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_10_external_managed.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_11_managed_external.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_12_external_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_13_managed_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_14_managed_location_over_existing.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_15_external_part.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_16_part_external.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_17_part_managed.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_18_part_external.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_19_00_part_external_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_19_part_external_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_20_part_managed_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_22_import_exist_authsuccess.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_23_import_part_authsuccess.q.out
     hive/trunk/ql/src/test/results/clientpositive/exim_24_import_nonexist_authsuccess.q.out
     hive/trunk/ql/src/test/results/clientpositive/input46.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
     hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out

Modified: hive/trunk/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out (original)
+++ hive/trunk/hbase-handler/src/test/results/negative/cascade_dbdrop_hadoop20.q.out Fri Dec 13 00:12:01 2013
@@ -37,7 +37,11 @@ Found 3 items
  #### A masked pattern was here ####
  PREHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
  PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:hbasedb
+PREHOOK: Output: database:hbasedb
  POSTHOOK: query: DROP DATABASE IF EXISTS hbaseDB CASCADE
  POSTHOOK: type: DROPDATABASE
+POSTHOOK: Input: database:hbasedb
+POSTHOOK: Output: database:hbasedb
  Command failed with exit code = -1
  Query returned non-zero code: -1, cause: null

Modified: hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java (original)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/security/HdfsAuthorizationProvider.java Fri Dec 13 00:12:01 2013
@@ -93,7 +93,7 @@ public class HdfsAuthorizationProvider e

      switch (priv.getPriv()) {
      case ALL:
- throw new AuthorizationException("no matching Action for Privilege.All");
+ return FsAction.READ_WRITE;
      case ALTER_DATA:
        return FsAction.WRITE;
      case ALTER_METADATA:
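
FsAction.READ_WRITE is the union of the READ and WRITE bits, so Privilege.ALL now authorizes against a path on which the user holds both, instead of failing unconditionally. A small standalone check of that semantics (this class is illustrative, not from the patch):

    import org.apache.hadoop.fs.permission.FsAction;

    public class FsActionSketch {
      public static void main(String[] args) {
        // READ_WRITE implies both READ and WRITE, but not EXECUTE.
        System.out.println(FsAction.READ_WRITE.implies(FsAction.READ));    // true
        System.out.println(FsAction.READ_WRITE.implies(FsAction.WRITE));   // true
        System.out.println(FsAction.READ_WRITE.implies(FsAction.EXECUTE)); // false
      }
    }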

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestClientSideAuthorizationProvider.java Fri Dec 13 00:12:01 2013
@@ -118,11 +118,15 @@ public class TestClientSideAuthorization

      String userName = ugi.getUserName();

+ allowCreateDatabase(userName);
+
      CommandProcessorResponse ret = driver.run("create database " + dbName);
      assertEquals(0,ret.getResponseCode());
      Database db = msc.getDatabase(dbName);
      String dbLocn = db.getLocationUri();

+ disallowCreateDatabase(userName);
+
      validateCreateDb(db,dbName);
      disallowCreateInDb(dbName, userName, dbLocn);

@@ -183,6 +187,15 @@ public class TestClientSideAuthorization
      // nothing needed here by default
    }

+ protected void allowCreateDatabase(String userName)
+ throws Exception {
+ driver.run("grant create to user "+userName);
+ }
+
+ protected void disallowCreateDatabase(String userName)
+ throws Exception {
+ driver.run("revoke create from user "+userName);
+ }

    protected void allowCreateInDb(String dbName, String userName, String location)
        throws Exception {

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestMetastoreAuthorizationProvider.java Fri Dec 13 00:12:01 2013
@@ -137,6 +137,8 @@ public class TestMetastoreAuthorizationP
      String tblName = getTestTableName();
      String userName = ugi.getUserName();

+ allowCreateDatabase(userName);
+
      CommandProcessorResponse ret = driver.run("create database " + dbName);
      assertEquals(0,ret.getResponseCode());
      Database db = msc.getDatabase(dbName);
@@ -145,6 +147,8 @@ public class TestMetastoreAuthorizationP
      validateCreateDb(db,dbName);
      disallowCreateInDb(dbName, userName, dbLocn);

+ disallowCreateDatabase(userName);
+
      driver.run("use " + dbName);
      ret = driver.run(
          String.format("create table %s (a string) partitioned by (b string)", tblName));
@@ -248,6 +252,16 @@ public class TestMetastoreAuthorizationP

    }

+ protected void allowCreateDatabase(String userName)
+ throws Exception {
+ driver.run("grant create to user "+userName);
+ }
+
+ protected void disallowCreateDatabase(String userName)
+ throws Exception {
+ driver.run("revoke create from user "+userName);
+ }
+
    protected void allowCreateInTbl(String tableName, String userName, String location)
        throws Exception{
      driver.run("grant create on table "+tableName+" to user "+userName);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Dec 13 00:12:01 2013
@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.FSDataInputS
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
  import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.Schema;
  import org.apache.hadoop.hive.ql.exec.ConditionalTask;
@@ -53,6 +54,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
  import org.apache.hadoop.hive.ql.exec.TaskRunner;
  import org.apache.hadoop.hive.ql.exec.Utilities;
  import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.hooks.Entity;
  import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
  import org.apache.hadoop.hive.ql.hooks.Hook;
  import org.apache.hadoop.hive.ql.hooks.HookContext;
@@ -476,9 +478,10 @@ public class Driver implements CommandPr
            perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
            doAuthorization(sem);
          } catch (AuthorizationException authExp) {
- errorMessage = "Authorization failed:" + authExp.getMessage()
- + ". Use show grant to get more details.";
- console.printError(errorMessage);
+ console.printError("Authorization failed:" + authExp.getMessage()
+ + ". Use SHOW GRANT to get more details.");
+ errorMessage = authExp.getMessage();
+ SQLState = "42000";
            return 403;
          } finally {
            perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
@@ -521,7 +524,10 @@ public class Driver implements CommandPr
      HiveOperation op = ss.getHiveOperation();
      Hive db = sem.getDb();
      if (op != null) {
- if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
+ if (op.equals(HiveOperation.CREATEDATABASE)) {
+ ss.getAuthorizer().authorize(
+ op.getInputRequiredPrivileges(), op.getOutputRequiredPrivileges());
+ } else if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.CREATETABLE)) {
          ss.getAuthorizer().authorize(
              db.getDatabase(SessionState.get().getCurrentDatabase()), null,
@@ -538,6 +544,11 @@ public class Driver implements CommandPr
        }
        if (outputs != null && outputs.size() > 0) {
          for (WriteEntity write : outputs) {
+ if (write.getType() == Entity.Type.DATABASE) {
+ ss.getAuthorizer().authorize(write.getDatabase(),
+ null, op.getOutputRequiredPrivileges());
+ continue;
+ }

            if (write.getType() == WriteEntity.Type.PARTITION) {
              Partition part = db.getPartition(write.getTable(), write
@@ -565,6 +576,9 @@ public class Driver implements CommandPr

        Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
        for (ReadEntity read : inputs) {
+ if (read.getType() == Entity.Type.DATABASE) {
+ continue;
+ }
          Table tbl = read.getTable();
          if ((read.getPartition() != null) || (tbl.isPartitioned())) {
            String tblName = tbl.getTableName();
@@ -634,6 +648,10 @@ public class Driver implements CommandPr
        // cache the results for table authorization
        Set<String> tableAuthChecked = new HashSet<String>();
        for (ReadEntity read : inputs) {
+ if (read.getType() == Entity.Type.DATABASE) {
+ ss.getAuthorizer().authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
+ continue;
+ }
          Table tbl = read.getTable();
          if (read.getPartition() != null) {
            Partition partition = read.getPartition();
@@ -681,6 +699,8 @@ public class Driver implements CommandPr
    }

    /**
+ * @param d
+ * The database to be locked
     * @param t
     * The table to be locked
     * @param p
@@ -689,8 +709,8 @@ public class Driver implements CommandPr
     * The mode of the lock (SHARED/EXCLUSIVE) Get the list of objects to be locked. If a
     * partition needs to be locked (in any mode), all its parents should also be locked in
     * SHARED mode.
- **/
- private List<HiveLockObj> getLockObjects(Table t, Partition p, HiveLockMode mode)
+ */
+ private List<HiveLockObj> getLockObjects(Database d, Table t, Partition p, HiveLockMode mode)
        throws SemanticException {
      List<HiveLockObj> locks = new LinkedList<HiveLockObj>();

@@ -699,8 +719,13 @@ public class Driver implements CommandPr
                               String.valueOf(System.currentTimeMillis()),
                               "IMPLICIT",
                               plan.getQueryStr());
+ if (d != null) {
+ locks.add(new HiveLockObj(new HiveLockObject(d.getName(), lockData), mode));
+ return locks;
+ }

      if (t != null) {
+ locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
        locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
        mode = HiveLockMode.SHARED;
        locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
@@ -708,6 +733,7 @@ public class Driver implements CommandPr
      }

      if (p != null) {
+ locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
        if (!(p instanceof DummyPartition)) {
          locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
        }
@@ -747,6 +773,7 @@ public class Driver implements CommandPr
        locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
        locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
      }
+
      return locks;
    }

@@ -794,24 +821,29 @@ public class Driver implements CommandPr
        // If a lock needs to be acquired on any partition, a read lock needs to be acquired on all
        // its parents also
        for (ReadEntity input : plan.getInputs()) {
- if (input.getType() == ReadEntity.Type.TABLE) {
- lockObjects.addAll(getLockObjects(input.getTable(), null, HiveLockMode.SHARED));
+ if (input.getType() == ReadEntity.Type.DATABASE) {
+ lockObjects.addAll(getLockObjects(input.getDatabase(), null, null, HiveLockMode.SHARED));
+ } else if (input.getType() == ReadEntity.Type.TABLE) {
+ lockObjects.addAll(getLockObjects(null, input.getTable(), null, HiveLockMode.SHARED));
          } else {
- lockObjects.addAll(getLockObjects(null, input.getPartition(), HiveLockMode.SHARED));
+ lockObjects.addAll(getLockObjects(null, null, input.getPartition(), HiveLockMode.SHARED));
          }
        }

        for (WriteEntity output : plan.getOutputs()) {
          List<HiveLockObj> lockObj = null;
- if (output.getTyp() == WriteEntity.Type.TABLE) {
- lockObj = getLockObjects(output.getTable(), null,
+ if (output.getType() == WriteEntity.Type.DATABASE) {
+ lockObjects.addAll(getLockObjects(output.getDatabase(), null, null,
+ output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED));
+ } else if (output.getTyp() == WriteEntity.Type.TABLE) {
+ lockObj = getLockObjects(null, output.getTable(), null,
                output.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED);
          } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
- lockObj = getLockObjects(null, output.getPartition(), HiveLockMode.EXCLUSIVE);
+ lockObj = getLockObjects(null, null, output.getPartition(), HiveLockMode.EXCLUSIVE);
          }
          // In case of dynamic queries, it is possible to have incomplete dummy partitions
          else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
- lockObj = getLockObjects(null, output.getPartition(), HiveLockMode.SHARED);
+ lockObj = getLockObjects(null, null, output.getPartition(), HiveLockMode.SHARED);
          }

          if(lockObj != null) {
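
To make the new lock shapes concrete: a database input or output now yields a single lock on the bare database name, acquired in the requested mode, while tables and partitions additionally take a SHARED lock on their parent database name. A rough standalone sketch of the database case, using the same HiveLockObjectData layout as above (the query id and query string are made up):

    import java.util.LinkedList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
    import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
    import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
    import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;

    public class LockShapeSketch {
      public static void main(String[] args) {
        HiveLockObjectData data = new HiveLockObjectData("query-id-0",
            String.valueOf(System.currentTimeMillis()), "IMPLICIT",
            "drop database testdb");

        // Database case: one lock object on the database name; EXCLUSIVE here
        // models a complete WriteEntity such as DROP DATABASE.
        List<HiveLockObj> locks = new LinkedList<HiveLockObj>();
        locks.add(new HiveLockObj(
            new HiveLockObject("testdb", data), HiveLockMode.EXCLUSIVE));
        System.out.println(locks.size());   // 1: no parent objects to lock
      }
    }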

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Dec 13 00:12:01 2013
@@ -130,6 +130,7 @@ import org.apache.hadoop.hive.ql.plan.Dr
  import org.apache.hadoop.hive.ql.plan.DropTableDesc;
  import org.apache.hadoop.hive.ql.plan.GrantDesc;
  import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
+import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.LockTableDesc;
  import org.apache.hadoop.hive.ql.plan.MsckDesc;
  import org.apache.hadoop.hive.ql.plan.PartitionSpec;
@@ -152,6 +153,7 @@ import org.apache.hadoop.hive.ql.plan.Sh
  import org.apache.hadoop.hive.ql.plan.ShowTblPropertiesDesc;
  import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
  import org.apache.hadoop.hive.ql.plan.api.StageType;
  import org.apache.hadoop.hive.ql.security.authorization.Privilege;
@@ -233,6 +235,16 @@ public class DDLTask extends Task<DDLWor
          return dropDatabase(db, dropDatabaseDesc);
        }

+ LockDatabaseDesc lockDatabaseDesc = work.getLockDatabaseDesc();
+ if (lockDatabaseDesc != null) {
+ return lockDatabase(lockDatabaseDesc);
+ }
+
+ UnlockDatabaseDesc unlockDatabaseDesc = work.getUnlockDatabaseDesc();
+ if (unlockDatabaseDesc != null) {
+ return unlockDatabase(unlockDatabaseDesc);
+ }
+
        SwitchDatabaseDesc switchDatabaseDesc = work.getSwitchDatabaseDesc();
        if (switchDatabaseDesc != null) {
          return switchDatabase(db, switchDatabaseDesc);
@@ -2468,7 +2480,7 @@ public class DDLTask extends Task<DDLWor

      HiveLockMode mode = HiveLockMode.valueOf(lockTbl.getMode());
      String tabName = lockTbl.getTableName();
- Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
+ Table tbl = db.getTable(tabName);
      if (tbl == null) {
        throw new HiveException("Table " + tabName + " does not exist ");
      }
@@ -2499,6 +2511,78 @@ public class DDLTask extends Task<DDLWor
      return 0;
    }

+ /**
+ * Lock the database
+ *
+ * @param lockDb
+ * the database to be locked along with the mode
+ * @return Returns 0 when execution succeeds and above 0 if it fails.
+ * @throws HiveException
+ * Throws this exception if an unexpected error occurs.
+ */
+ private int lockDatabase(LockDatabaseDesc lockDb) throws HiveException {
+ Context ctx = driverContext.getCtx();
+ HiveLockManager lockMgr = ctx.getHiveLockMgr();
+ if (lockMgr == null) {
+ throw new HiveException("lock Database LockManager not specified");
+ }
+
+ HiveLockMode mode = HiveLockMode.valueOf(lockDb.getMode());
+ String dbName = lockDb.getDatabaseName();
+
+ Database dbObj = db.getDatabase(dbName);
+ if (dbObj == null) {
+ throw new HiveException("Database " + dbName + " does not exist ");
+ }
+
+ HiveLockObjectData lockData =
+ new HiveLockObjectData(lockDb.getQueryId(),
+ String.valueOf(System.currentTimeMillis()),
+ "EXPLICIT", lockDb.getQueryStr());
+
+ HiveLock lck = lockMgr.lock(new HiveLockObject(dbObj.getName(), lockData), mode, true);
+ if (lck == null) {
+ return 1;
+ }
+ return 0;
+ }
+
+ /**
+ * Unlock the database specified
+ *
+ * @param unlockDb
+ * the database to be unlocked
+ * @return Returns 0 when execution succeeds and above 0 if it fails.
+ * @throws HiveException
+ * Throws this exception if an unexpected error occurs.
+ */
+ private int unlockDatabase(UnlockDatabaseDesc unlockDb) throws HiveException {
+ Context ctx = driverContext.getCtx();
+ HiveLockManager lockMgr = ctx.getHiveLockMgr();
+ if (lockMgr == null) {
+ throw new HiveException("unlock Database LockManager not specified");
+ }
+
+ String dbName = unlockDb.getDatabaseName();
+
+ Database dbObj = db.getDatabase(dbName);
+ if (dbObj == null) {
+ throw new HiveException("Database " + dbName + " does not exist ");
+ }
+ HiveLockObject obj = new HiveLockObject(dbObj.getName(), null);
+
+ List<HiveLock> locks = lockMgr.getLocks(obj, false, false);
+ if ((locks == null) || (locks.isEmpty())) {
+ throw new HiveException("Database " + dbName + " is not locked ");
+ }
+
+ for (HiveLock lock: locks) {
+ lockMgr.unlock(lock);
+
+ }
+ return 0;
+ }
+
    private HiveLockObject getHiveObject(String tabName,
                                         Map<String, String> partSpec) throws HiveException {
      Table tbl = db.getTable(tabName);
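
As a usage sketch of the two new code paths, assuming the Driver session from the first example and a configured lock manager (hive.support.concurrency=true); the database name is hypothetical:

    import org.apache.hadoop.hive.ql.Driver;

    public class DbLockCycleSketch {
      static void cycle(Driver driver) throws Exception {
        driver.run("lock database testdb shared");   // DDLTask.lockDatabase()
        driver.run("show locks database testdb");    // lists the lock taken above
        driver.run("unlock database testdb");        // DDLTask.unlockDatabase();
                                                     // fails if testdb is not locked
      }
    }

A conflicting request, such as a second EXCLUSIVE lock on an already-locked database, makes lockDatabase() return 1 and the command fail, which is what the new lockneg_try_db_lock_conflict.q test exercises.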

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java Fri Dec 13 00:12:01 2013
@@ -284,7 +284,7 @@ public class MoveTask extends Task<MoveW
            db.loadTable(new Path(tbd.getSourceDir()), tbd.getTable()
                .getTableName(), tbd.getReplace(), tbd.getHoldDDLTime());
            if (work.getOutputs() != null) {
- work.getOutputs().add(new WriteEntity(table, true));
+ work.getOutputs().add(new WriteEntity(table));
            }
          } else {
            LOG.info("Partition is: " + tbd.getPartitionSpec().toString());
@@ -376,7 +376,7 @@ public class MoveTask extends Task<MoveW
                  updatePartitionBucketSortColumns(table, partn, bucketCols, numBuckets, sortCols);
                }

- WriteEntity enty = new WriteEntity(partn, true);
+ WriteEntity enty = new WriteEntity(partn);
                if (work.getOutputs() != null) {
                  work.getOutputs().add(enty);
                }
@@ -417,7 +417,7 @@ public class MoveTask extends Task<MoveW
             dc = new DataContainer(table.getTTable(), partn.getTPartition());
             // add this partition to post-execution hook
             if (work.getOutputs() != null) {
- work.getOutputs().add(new WriteEntity(partn, true));
+ work.getOutputs().add(new WriteEntity(partn));
             }
           }
          }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Fri Dec 13 00:12:01 2013
@@ -22,11 +22,10 @@ import java.io.Serializable;
  import java.net.URI;
  import java.util.Map;

+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.ql.metadata.Partition;
  import org.apache.hadoop.hive.ql.metadata.DummyPartition;
  import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.conf.HiveConf;

  /**
   * This class encapsulates an object that is being read or written to by the
@@ -40,8 +39,13 @@ public class Entity implements Serializa
     * The type of the entity.
     */
    public static enum Type {
- TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
- };
+ DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
+ }
+
+ /**
+ * The database if this is a database.
+ */
+ private Database database;

    /**
     * The type.
@@ -80,7 +84,7 @@ public class Entity implements Serializa
    }

    public void setComplete(boolean complete) {
- this.complete = complete;;
+ this.complete = complete;
    }

    public String getName() {
@@ -91,6 +95,14 @@ public class Entity implements Serializa
      this.name = name;
    }

+ public Database getDatabase() {
+ return database;
+ }
+
+ public void setDatabase(Database database) {
+ this.database = database;
+ }
+
    public Type getTyp() {
      return typ;
    }
@@ -130,15 +142,26 @@ public class Entity implements Serializa
    }

    /**
+ * Constructor for a database.
+ *
+ * @param database
+ * Database that is read or written to.
+ * @param complete
+ * Means the database is target, not for table or partition, etc.
+ */
+ public Entity(Database database, boolean complete) {
+ this.database = database;
+ this.typ = Type.DATABASE;
+ this.name = computeName();
+ this.complete = complete;
+ }
+
+ /**
     * Constructor for a table.
     *
     * @param t
     * Table that is read or written to.
     */
- public Entity(Table t) {
- this(t, true);
- }
-
    public Entity(Table t, boolean complete) {
      d = null;
      p = null;
@@ -154,10 +177,6 @@ public class Entity implements Serializa
     * @param p
     * Partition that is read or written to.
     */
- public Entity(Partition p) {
- this(p, true);
- }
-
    public Entity(Partition p, boolean complete) {
      d = null;
      this.p = p;
@@ -176,18 +195,6 @@ public class Entity implements Serializa
      this.complete = complete;
    }

- /**
- * Constructor for a file.
- *
- * @param d
- * The name of the directory that is being read or written to.
- * @param islocal
- * Flag to decide whether this directory is local or in dfs.
- */
- public Entity(String d, boolean islocal) {
- this(d, islocal, true);
- }
-
    public Entity(String d, boolean islocal, boolean complete) {
      this.d = d;
      p = null;
@@ -223,6 +230,11 @@ public class Entity implements Serializa
     * Get the location of the entity.
     */
    public URI getLocation() throws Exception {
+ if (typ == Type.DATABASE) {
+ String location = database.getLocationUri();
+ return location == null ? null : new URI(location);
+ }
+
      if (typ == Type.TABLE) {
        return t.getDataLocation();
      }
@@ -262,6 +274,8 @@ public class Entity implements Serializa

    private String computeName() {
      switch (typ) {
+ case DATABASE:
+ return "database:" + database.getName();
      case TABLE:
        return t.getDbName() + "@" + t.getTableName();
      case PARTITION:
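
For illustration, constructing database entities directly and observing the computed name (standalone sketch; Database is the thrift-generated metastore bean):

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    public class DbEntitySketch {
      public static void main(String[] args) {
        Database db = new Database();
        db.setName("testdb");

        ReadEntity input = new ReadEntity(db);     // Entity.Type.DATABASE
        WriteEntity output = new WriteEntity(db);
        System.out.println(input.getName());       // prints "database:testdb"
      }
    }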

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Fri Dec 13 00:12:01 2013
@@ -22,6 +22,7 @@ import java.io.Serializable;
  import java.util.HashSet;
  import java.util.Set;

+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.ql.metadata.Partition;
  import org.apache.hadoop.hive.ql.metadata.Table;

@@ -46,13 +47,20 @@ public class ReadEntity extends Entity i
    }

    /**
+ * Constructor for a database.
+ */
+ public ReadEntity(Database database) {
+ super(database, true);
+ }
+
+ /**
     * Constructor.
     *
     * @param t
     * The Table that the query reads from.
     */
    public ReadEntity(Table t) {
- super(t);
+ super(t, true);
    }

    private void initParent(ReadEntity parent) {
@@ -62,7 +70,7 @@ public class ReadEntity extends Entity i
    }

    public ReadEntity(Table t, ReadEntity parent) {
- super(t);
+ super(t, true);
      initParent(parent);
    }

@@ -73,11 +81,11 @@ public class ReadEntity extends Entity i
     * The partition that the query reads from.
     */
    public ReadEntity(Partition p) {
- super(p);
+ super(p, true);
    }

    public ReadEntity(Partition p, ReadEntity parent) {
- super(p);
+ super(p, true);
      initParent(parent);
    }


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Fri Dec 13 00:12:01 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.hooks;

  import java.io.Serializable;

+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.ql.metadata.Partition;
  import org.apache.hadoop.hive.ql.metadata.DummyPartition;
  import org.apache.hadoop.hive.ql.metadata.Table;
@@ -37,6 +39,10 @@ public class WriteEntity extends Entity
      super();
    }

+ public WriteEntity(Database database) {
+ super(database, true);
+ }
+
    /**
     * Constructor for a table.
     *
@@ -44,7 +50,7 @@ public class WriteEntity extends Entity
     * Table that is written to.
     */
    public WriteEntity(Table t) {
- this(t, true);
+ super(t, true);
    }

    public WriteEntity(Table t, boolean complete) {
@@ -58,11 +64,7 @@ public class WriteEntity extends Entity
     * Partition that is written to.
     */
    public WriteEntity(Partition p) {
- this(p, true);
- }
-
- public WriteEntity(Partition p, boolean complete) {
- super(p, complete);
+ super(p, true);
    }

    public WriteEntity(DummyPartition p, boolean complete) {
@@ -77,12 +79,8 @@ public class WriteEntity extends Entity
     * @param islocal
     * Flag to decide whether this directory is local or in dfs.
     */
- public WriteEntity(String d, boolean islocal) {
- this(d, islocal, true);
- }
-
- public WriteEntity(String d, boolean islocal, boolean complete) {
- super(d, islocal, complete);
+ public WriteEntity(Path d, boolean islocal) {
+ super(d.toString(), islocal, true);
    }

    /**
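
The directory constructor now takes a filesystem Path rather than a raw String; callers such as ExportSemanticAnalyzer (next file) build the Path first. A minimal sketch with a made-up location:

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    public class DirEntitySketch {
      public static void main(String[] args) {
        // islocal=false: the directory is on DFS, matching the
        // toURI.getScheme().equals("hdfs") check in ExportSemanticAnalyzer.
        WriteEntity exportDir = new WriteEntity(new Path("hdfs:///tmp/export"), false);
        System.out.println(exportDir.getName());
      }
    }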

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Fri Dec 13 00:12:01 2013
@@ -39,6 +39,7 @@ import org.antlr.runtime.tree.Tree;
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
  import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.Order;
  import org.apache.hadoop.hive.ql.Context;
@@ -67,6 +68,7 @@ import org.apache.hadoop.hive.ql.optimiz
  import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
  import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
  import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
  import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
  import org.apache.hadoop.hive.serde.serdeConstants;
  import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -77,7 +79,6 @@ import org.apache.hadoop.hive.serde2.typ
  import org.apache.hadoop.mapred.SequenceFileInputFormat;
  import org.apache.hadoop.mapred.SequenceFileOutputFormat;
  import org.apache.hadoop.mapred.TextInputFormat;
-import org.apache.hadoop.util.StringUtils;

  import com.google.common.annotations.VisibleForTesting;

@@ -1248,4 +1249,84 @@ public abstract class BaseSemanticAnalyz
      }
      return partitionDateFormat.format(value);
    }
+
+ protected Database getDatabase(String dbName) throws SemanticException {
+ return getDatabase(dbName, true);
+ }
+
+ protected Database getDatabase(String dbName, boolean throwException) throws SemanticException {
+ try {
+ Database database = db.getDatabase(dbName);
+ if (database == null && throwException) {
+ throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName));
+ }
+ return database;
+ } catch (HiveException e) {
+ throw new SemanticException(ErrorMsg.DATABASE_NOT_EXISTS.getMsg(dbName), e);
+ }
+ }
+
+ protected Table getTable(String tblName) throws SemanticException {
+ return getTable(null, tblName, true);
+ }
+
+ protected Table getTable(String tblName, boolean throwException) throws SemanticException {
+ String currentDb = SessionState.get().getCurrentDatabase();
+ return getTable(currentDb, tblName, throwException);
+ }
+
+ // qnName : possibly contains database name (dot separated)
+ protected Table getTableWithQN(String qnName, boolean throwException) throws SemanticException {
+ int dot = qnName.indexOf('.');
+ if (dot < 0) {
+ String currentDb = SessionState.get().getCurrentDatabase();
+ return getTable(currentDb, qnName, throwException);
+ }
+ return getTable(qnName.substring(0, dot), qnName.substring(dot + 1), throwException);
+ }
+
+ protected Table getTable(String database, String tblName, boolean throwException)
+ throws SemanticException {
+ try {
+ Table tab = database == null ? db.getTable(tblName, false)
+ : db.getTable(database, tblName, false);
+ if (tab == null && throwException) {
+ throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
+ }
+ return tab;
+ } catch (HiveException e) {
+ throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName), e);
+ }
+ }
+
+ protected Partition getPartition(Table table, Map<String, String> partSpec,
+ boolean throwException) throws SemanticException {
+ try {
+ Partition partition = db.getPartition(table, partSpec, false);
+ if (partition == null && throwException) {
+ throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+ }
+ return partition;
+ } catch (HiveException e) {
+ throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+ }
+ }
+
+ protected List<Partition> getPartitions(Table table, Map<String, String> partSpec,
+ boolean throwException) throws SemanticException {
+ try {
+ List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
+ db.getPartitions(table, partSpec);
+ if (partitions.isEmpty() && throwException) {
+ throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
+ }
+ return partitions;
+ } catch (HiveException e) {
+ throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
+ }
+ }
+
+ protected String toMessage(ErrorMsg message, Object detail) {
+ return detail == null ? message.getMsg() : message.getMsg(detail.toString());
+ }
  }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Fri Dec 13 00:12:01 2013
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.conf.HiveC
  import org.apache.hadoop.hive.metastore.MetaStoreUtils;
  import org.apache.hadoop.hive.metastore.TableType;
  import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.Index;
  import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -95,6 +96,7 @@ import org.apache.hadoop.hive.ql.plan.Gr
  import org.apache.hadoop.hive.ql.plan.GrantRevokeRoleDDL;
  import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
  import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
+import org.apache.hadoop.hive.ql.plan.LockDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.LockTableDesc;
  import org.apache.hadoop.hive.ql.plan.MoveWork;
  import org.apache.hadoop.hive.ql.plan.MsckDesc;
@@ -121,6 +123,7 @@ import org.apache.hadoop.hive.ql.plan.St
  import org.apache.hadoop.hive.ql.plan.SwitchDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.TableDesc;
  import org.apache.hadoop.hive.ql.plan.TruncateTableDesc;
+import org.apache.hadoop.hive.ql.plan.UnlockDatabaseDesc;
  import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
  import org.apache.hadoop.hive.ql.security.authorization.Privilege;
  import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
@@ -298,6 +301,10 @@ public class DDLSemanticAnalyzer extends
        ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
        analyzeShowLocks(ast);
        break;
+ case HiveParser.TOK_SHOWDBLOCKS:
+ ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+ analyzeShowDbLocks(ast);
+ break;
      case HiveParser.TOK_DESCFUNCTION:
        ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
        analyzeDescFunction(ast);
@@ -394,6 +401,12 @@ public class DDLSemanticAnalyzer extends
      case HiveParser.TOK_UNLOCKTABLE:
        analyzeUnlockTable(ast);
        break;
+ case HiveParser.TOK_LOCKDB:
+ analyzeLockDatabase(ast);
+ break;
+ case HiveParser.TOK_UNLOCKDB:
+ analyzeUnlockDatabase(ast);
+ break;
      case HiveParser.TOK_CREATEDATABASE:
        analyzeCreateDatabase(ast);
        break;
@@ -809,6 +822,14 @@ public class DDLSemanticAnalyzer extends
        ifCascade = true;
      }

+ Database database = getDatabase(dbName, !ifExists);
+ if (database == null) {
+ return;
+ }
+
+ inputs.add(new ReadEntity(database));
+ outputs.add(new WriteEntity(database));
+
      DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbName, ifExists, ifCascade);
      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), conf));
    }
@@ -2274,6 +2295,29 @@ public class DDLSemanticAnalyzer extends
      ctx.setNeedLockMgr(true);
    }

+ /**
+ * Add the task according to the parsed command tree. This is used for the CLI
+ * command "SHOW LOCKS DATABASE database [extended];".
+ *
+ * @param ast
+ * The parsed command tree.
+ * @throws SemanticException
+ * Parsing failed
+ */
+ private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
+ boolean isExtended = (ast.getChildCount() > 1);
+ String dbName = stripQuotes(ast.getChild(0).getText());
+
+ ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName,
+ isExtended);
+ rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+ showLocksDesc), conf));
+ setFetchTask(createFetchTask(showLocksDesc.getSchema()));
+
+ // Need to initialize the lock manager
+ ctx.setNeedLockMgr(true);
+ }
+
    /**
     * Add the task according to the parsed command tree. This is used for the CLI
     * command "LOCK TABLE ..;".
@@ -2335,6 +2379,30 @@ public class DDLSemanticAnalyzer extends
      ctx.setNeedLockMgr(true);
    }

+ private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
+ String dbName = unescapeIdentifier(ast.getChild(0).getText());
+ String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
+
+ //inputs.add(new ReadEntity(dbName));
+ //outputs.add(new WriteEntity(dbName));
+ LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
+ HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
+ lockDatabaseDesc.setQueryStr(ctx.getCmd());
+ DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
+ rootTasks.add(TaskFactory.get(work, conf));
+ ctx.setNeedLockMgr(true);
+ }
+
+ private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
+ String dbName = unescapeIdentifier(ast.getChild(0).getText());
+
+ UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
+ DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
+ rootTasks.add(TaskFactory.get(work, conf));
+ // Need to initialize the lock manager
+ ctx.setNeedLockMgr(true);
+ }
+
    /**
     * Add the task according to the parsed command tree. This is used for the CLI
     * command "DESCRIBE FUNCTION;".
@@ -2531,7 +2599,7 @@ public class DDLSemanticAnalyzer extends

      // check if table exists.
      try {
- tab = db.getTable(SessionState.get().getCurrentDatabase(), tblName, true);
+ tab = getTable(tblName, true);
        inputs.add(new ReadEntity(tab));
      } catch (HiveException e) {
        throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
@@ -3280,57 +3348,4 @@ public class DDLSemanticAnalyzer extends
        throw new SemanticException(e);
      }
    }
-
- private Table getTable(String tblName) throws SemanticException {
- return getTable(null, tblName, true);
- }
-
- private Table getTable(String tblName, boolean throwException) throws SemanticException {
- return getTable(SessionState.get().getCurrentDatabase(), tblName, throwException);
- }
-
- private Table getTable(String database, String tblName, boolean throwException)
- throws SemanticException {
- try {
- Table tab = database == null ? db.getTable(tblName, false)
- : db.getTable(database, tblName, false);
- if (tab == null && throwException) {
- throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
- }
- return tab;
- } catch (HiveException e) {
- throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tblName));
- }
- }
-
- private Partition getPartition(Table table, Map<String, String> partSpec, boolean throwException)
- throws SemanticException {
- try {
- Partition partition = db.getPartition(table, partSpec, false);
- if (partition == null && throwException) {
- throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
- }
- return partition;
- } catch (HiveException e) {
- throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
- }
- }
-
- private List<Partition> getPartitions(Table table, Map<String, String> partSpec,
- boolean throwException) throws SemanticException {
- try {
- List<Partition> partitions = partSpec == null ? db.getPartitions(table) :
- db.getPartitions(table, partSpec);
- if (partitions.isEmpty() && throwException) {
- throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec));
- }
- return partitions;
- } catch (HiveException e) {
- throw new SemanticException(toMessage(ErrorMsg.INVALID_PARTITION, partSpec), e);
- }
- }
-
- private String toMessage(ErrorMsg message, Object detail) {
- return detail == null ? message.getMsg() : message.getMsg(detail.toString());
- }
  }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java Fri Dec 13 00:12:01 2013
@@ -101,10 +101,12 @@ public class ExportSemanticAnalyzer exte
                .getMsg("Exception while writing out the local file"), e);
      }

+ Path parentPath = new Path(toURI);
+
      if (ts.tableHandle.isPartitioned()) {
        for (Partition partition : partitions) {
          URI fromURI = partition.getDataLocation();
- Path toPartPath = new Path(toURI.toString(), partition.getName());
+ Path toPartPath = new Path(parentPath, partition.getName());
          Task<? extends Serializable> rTask = TaskFactory.get(
              new CopyWork(fromURI.toString(), toPartPath.toString(), false),
              conf);
@@ -113,13 +115,12 @@ public class ExportSemanticAnalyzer exte
        }
      } else {
        URI fromURI = ts.tableHandle.getDataLocation();
- Path toDataPath = new Path(toURI.toString(), "data");
+ Path toDataPath = new Path(parentPath, "data");
        Task<? extends Serializable> rTask = TaskFactory.get(new CopyWork(
            fromURI.toString(), toDataPath.toString(), false), conf);
        rootTasks.add(rTask);
        inputs.add(new ReadEntity(ts.tableHandle));
      }
- outputs.add(new WriteEntity(toURI.toString(),
- toURI.getScheme().equals("hdfs") ? true : false));
+ outputs.add(new WriteEntity(parentPath, toURI.getScheme().equals("hdfs")));
    }
  }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Dec 13 00:12:01 2013
@@ -164,6 +164,8 @@ TOK_SHOW_TBLPROPERTIES;
  TOK_SHOWLOCKS;
  TOK_LOCKTABLE;
  TOK_UNLOCKTABLE;
+TOK_LOCKDB;
+TOK_UNLOCKDB;
  TOK_SWITCHDATABASE;
  TOK_DROPDATABASE;
  TOK_DROPTABLE;
@@ -273,6 +275,7 @@ TOK_GRANT_ROLE;
  TOK_REVOKE_ROLE;
  TOK_SHOW_ROLE_GRANT;
  TOK_SHOWINDEXES;
+TOK_SHOWDBLOCKS;
  TOK_INDEXCOMMENT;
  TOK_DESCDATABASE;
  TOK_DATABASEPROPERTIES;
@@ -624,6 +627,8 @@ ddlStatement
    | analyzeStatement
    | lockStatement
    | unlockStatement
+ | lockDatabase
+ | unlockDatabase
    | createRoleStatement
    | dropRoleStatement
    | grantPrivileges
@@ -1237,6 +1242,7 @@ showStatement
      -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
    | KW_SHOW KW_TBLPROPERTIES tblName=identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
    | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWLOCKS $parttype? $isExtended?)
+ | KW_SHOW KW_LOCKS KW_DATABASE (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
    | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
      -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
      ;
@@ -1247,6 +1253,12 @@ lockStatement
      : KW_LOCK KW_TABLE tableName partitionSpec? lockMode -> ^(TOK_LOCKTABLE tableName lockMode partitionSpec?)
      ;

+lockDatabase
+@init { msgs.push("lock database statement"); }
+@after { msgs.pop(); }
+ : KW_LOCK KW_DATABASE (dbName=Identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
+ ;
+
  lockMode
  @init { msgs.push("lock mode"); }
  @after { msgs.pop(); }
@@ -1259,6 +1271,12 @@ unlockStatement
      : KW_UNLOCK KW_TABLE tableName partitionSpec? -> ^(TOK_UNLOCKTABLE tableName partitionSpec?)
      ;

+unlockDatabase
+@init { msgs.push("unlock database statement"); }
+@after { msgs.pop(); }
+ : KW_UNLOCK KW_DATABASE (dbName=Identifier) -> ^(TOK_UNLOCKDB $dbName)
+ ;
+
  createRoleStatement
  @init { msgs.push("create role"); }
  @after { msgs.pop(); }
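
Spelled out, the statement forms the new grammar rules accept (the database name must be a plain Identifier in each case), shown as driver calls against the session from the first example:

    driver.run("lock database testdb shared");           // -> TOK_LOCKDB
    driver.run("lock database testdb exclusive");        // -> TOK_LOCKDB
    driver.run("unlock database testdb");                // -> TOK_UNLOCKDB
    driver.run("show locks database testdb extended");   // -> TOK_SHOWDBLOCKS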

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Dec 13 00:12:01 2013
@@ -5564,7 +5564,7 @@ public class SemanticAnalyzer extends Ba
          table_desc = PlanUtils.getTableDesc(tblDesc, cols, colTypes);
        }

- if (!outputs.add(new WriteEntity(destStr, !isDfsDir))) {
+ if (!outputs.add(new WriteEntity(dest_path, !isDfsDir))) {
          throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
              .getMsg(destStr));
        }
@@ -8544,7 +8544,7 @@ public class SemanticAnalyzer extends Ba
        tsDesc.setStatsAggPrefix(tab.getDbName()+"."+k);

        // set up WritenEntity for replication
- outputs.add(new WriteEntity(tab, true));
+ outputs.add(new WriteEntity(tab));

        // add WriteEntity for each matching partition
        if (tab.isPartitioned()) {
@@ -8555,7 +8555,7 @@ public class SemanticAnalyzer extends Ba
          if (partitions != null) {
            for (Partition partn : partitions) {
              // inputs.add(new ReadEntity(partn)); // is this needed at all?
- outputs.add(new WriteEntity(partn, true));
+ outputs.add(new WriteEntity(partn));
            }
          }
        }
@@ -9542,9 +9542,7 @@ public class SemanticAnalyzer extends Ba
      // check for existence of table
      if (ifNotExists) {
        try {
- Table table = db.getTable(tableName, false); // use getTable(final String tableName, boolean
- // throwException) which doesn't throw
- // exception but null if table doesn't exist
+ Table table = getTableWithQN(tableName, false);
          if (table != null) { // table exists
            return null;
          }
@@ -9698,7 +9696,7 @@ public class SemanticAnalyzer extends Ba
    private void validateCreateView(CreateViewDesc createVwDesc)
      throws SemanticException {
      try {
- Table oldView = db.getTable(createVwDesc.getViewName(), false);
+ Table oldView = getTableWithQN(createVwDesc.getViewName(), false);

        // ALTER VIEW AS SELECT requires the view must exist
        if (createVwDesc.getIsAlterViewAs() && oldView == null) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Fri Dec 13 00:12:01 2013
@@ -68,6 +68,7 @@ public final class SemanticAnalyzerFacto
      commandType.put(HiveParser.TOK_SHOWINDEXES, HiveOperation.SHOWINDEXES);
      commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS);
      commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS);
+ commandType.put(HiveParser.TOK_SHOWDBLOCKS, HiveOperation.SHOWLOCKS);
      commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION);
      commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
      commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
@@ -85,6 +86,8 @@ public final class SemanticAnalyzerFacto
      commandType.put(HiveParser.TOK_QUERY, HiveOperation.QUERY);
      commandType.put(HiveParser.TOK_LOCKTABLE, HiveOperation.LOCKTABLE);
      commandType.put(HiveParser.TOK_UNLOCKTABLE, HiveOperation.UNLOCKTABLE);
+ commandType.put(HiveParser.TOK_LOCKDB, HiveOperation.LOCKDB);
+ commandType.put(HiveParser.TOK_UNLOCKDB, HiveOperation.UNLOCKDB);
      commandType.put(HiveParser.TOK_CREATEROLE, HiveOperation.CREATEROLE);
      commandType.put(HiveParser.TOK_DROPROLE, HiveOperation.DROPROLE);
      commandType.put(HiveParser.TOK_GRANT, HiveOperation.GRANT_PRIVILEGE);
@@ -185,6 +188,7 @@ public final class SemanticAnalyzerFacto
        case HiveParser.TOK_SHOWPARTITIONS:
        case HiveParser.TOK_SHOWINDEXES:
        case HiveParser.TOK_SHOWLOCKS:
+ case HiveParser.TOK_SHOWDBLOCKS:
        case HiveParser.TOK_CREATEINDEX:
        case HiveParser.TOK_DROPINDEX:
        case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
@@ -194,6 +198,8 @@ public final class SemanticAnalyzerFacto
        case HiveParser.TOK_ALTERTABLE_ALTERPARTS:
        case HiveParser.TOK_LOCKTABLE:
        case HiveParser.TOK_UNLOCKTABLE:
+ case HiveParser.TOK_LOCKDB:
+ case HiveParser.TOK_UNLOCKDB:
        case HiveParser.TOK_CREATEROLE:
        case HiveParser.TOK_DROPROLE:
        case HiveParser.TOK_GRANT:

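For context, the commandType table drives a simple token-to-operation lookup when the factory inspects the parse tree. A minimal illustration of that pattern follows; the surrounding factory code is not quoted in this hunk, so treat the setter call as an assumption:

    // Illustration only: map the root parser token to a HiveOperation so
    // later stages (authorization, hooks) know what kind of command runs.
    HiveOperation op = commandType.get(ast.getToken().getType());
    if (op != null) {
      SessionState.get().setCommandType(op); // assumed setter on SessionState
    }
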
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Fri Dec 13 00:12:01 2013
@@ -36,6 +36,8 @@ public class DDLWork implements Serializ
    private CreateDatabaseDesc createDatabaseDesc;
    private SwitchDatabaseDesc switchDatabaseDesc;
    private DropDatabaseDesc dropDatabaseDesc;
+   private LockDatabaseDesc lockDatabaseDesc;
+   private UnlockDatabaseDesc unlockDatabaseDesc;
    private CreateTableDesc createTblDesc;
    private CreateTableLikeDesc createTblLikeDesc;
    private CreateViewDesc createVwDesc;
@@ -284,6 +286,24 @@ public class DDLWork implements Serializ
    }

    /**
+    * @param lockDatabaseDesc
+    */
+   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      LockDatabaseDesc lockDatabaseDesc) {
+     this(inputs, outputs);
+     this.lockDatabaseDesc = lockDatabaseDesc;
+   }
+
+   /**
+    * @param unlockDatabaseDesc
+    */
+   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      UnlockDatabaseDesc unlockDatabaseDesc) {
+     this(inputs, outputs);
+     this.unlockDatabaseDesc = unlockDatabaseDesc;
+   }
+
+   /**
     * @param showFuncsDesc
     */
    public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
@@ -501,6 +521,22 @@ public class DDLWork implements Serializ
      this.switchDatabaseDesc = switchDatabaseDesc;
    }

+   public LockDatabaseDesc getLockDatabaseDesc() {
+     return lockDatabaseDesc;
+   }
+
+   public void setLockDatabaseDesc(LockDatabaseDesc lockDatabaseDesc) {
+     this.lockDatabaseDesc = lockDatabaseDesc;
+   }
+
+   public UnlockDatabaseDesc getUnlockDatabaseDesc() {
+     return unlockDatabaseDesc;
+   }
+
+   public void setUnlockDatabaseDesc(UnlockDatabaseDesc unlockDatabaseDesc) {
+     this.unlockDatabaseDesc = unlockDatabaseDesc;
+   }
+
    /**
     * @return the createTblDesc
     */

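A hedged sketch of how a semantic analyzer might use the new overloads to schedule a database lock; LockDatabaseDesc is added elsewhere in this commit, and its constructor argument list here is an assumption:

    // Sketch under assumptions: build a LOCK DATABASE task. The desc's
    // (dbName, mode, queryId) argument list is assumed, not quoted above.
    LockDatabaseDesc lockDesc = new LockDatabaseDesc(dbName, "SHARED", queryId);
    rootTasks.add(TaskFactory.get(
        new DDLWork(getInputs(), getOutputs(), lockDesc), conf));
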
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Fri Dec 13 00:12:01 2013
@@ -25,9 +25,11 @@ public enum HiveOperation {
    LOAD("LOAD", null, new Privilege[]{Privilege.ALTER_DATA}),
    EXPORT("EXPORT", new Privilege[]{Privilege.SELECT}, null),
    IMPORT("IMPORT", null, new Privilege[]{Privilege.ALTER_METADATA, Privilege.ALTER_DATA}),
-   CREATEDATABASE("CREATEDATABASE", null, null),
-   DROPDATABASE("DROPDATABASE", null, null),
-   SWITCHDATABASE("SWITCHDATABASE", null, null),
+   CREATEDATABASE("CREATEDATABASE", null, new Privilege[]{Privilege.CREATE}),
+   DROPDATABASE("DROPDATABASE", null, new Privilege[]{Privilege.DROP}),
+   SWITCHDATABASE("SWITCHDATABASE", new Privilege[]{Privilege.SELECT}, null),
+   LOCKDB("LOCKDATABASE", new Privilege[]{Privilege.LOCK}, null),
+   UNLOCKDB("UNLOCKDATABASE", new Privilege[]{Privilege.LOCK}, null),
    DROPTABLE ("DROPTABLE", null, new Privilege[]{Privilege.DROP}),
    DESCTABLE("DESCTABLE", null, null),
    DESCFUNCTION("DESCFUNCTION", null, null),

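The practical effect of the new Privilege arrays is that the authorization step can now require CREATE/DROP/LOCK on the database entity instead of skipping these commands entirely. As an illustration of reading them back (the accessor names follow HiveOperation's existing getters and are assumed here):

    // Illustration: privileges now attached to the database operations above.
    Privilege[] dropNeeds =
        HiveOperation.DROPDATABASE.getOutputRequiredPrivileges(); // {DROP}
    Privilege[] lockNeeds =
        HiveOperation.LOCKDB.getInputRequiredPrivileges();        // {LOCK}
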
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowLocksDesc.java Fri Dec 13 00:12:01 2013
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
  public class ShowLocksDesc extends DDLDesc implements Serializable {
    private static final long serialVersionUID = 1L;
    String resFile;
+   String dbName;
    String tableName;
    HashMap<String, String> partSpec;
    boolean isExt;
@@ -44,6 +45,10 @@ public class ShowLocksDesc extends DDLDe
     */
    private static final String schema = "tab_name,mode#string:string";

+   public String getDatabase() {
+     return dbName;
+   }
+
    public String getTable() {
      return table;
    }
@@ -58,6 +63,17 @@ public class ShowLocksDesc extends DDLDe
    /**
     * @param resFile
     */
+   public ShowLocksDesc(Path resFile, String dbName, boolean isExt) {
+     this.resFile = resFile.toString();
+     this.partSpec = null;
+     this.tableName = null;
+     this.isExt = isExt;
+     this.dbName = dbName;
+   }
+
+   /**
+    * @param resFile
+    */
    public ShowLocksDesc(Path resFile, String tableName,
                         HashMap<String, String> partSpec, boolean isExt) {
      this.resFile = resFile.toString();
@@ -66,6 +82,14 @@ public class ShowLocksDesc extends DDLDe
      this.isExt = isExt;
    }

+   public String getDbName() {
+     return dbName;
+   }
+
+   public void setDbName(String dbName) {
+     this.dbName = dbName;
+   }
+
    /**
     * @return the tableName
     */

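A hedged sketch of wiring up the new database-level constructor for SHOW LOCKS DATABASE; ctx.getResFile() and the DDLWork overload for ShowLocksDesc are assumed from the surrounding analyzer code, not quoted in this hunk:

    // Sketch: SHOW LOCKS DATABASE <dbName> [EXTENDED] lands in the new
    // constructor, leaving tableName and partSpec null.
    ShowLocksDesc showDesc = new ShowLocksDesc(ctx.getResFile(), dbName, isExt);
    rootTasks.add(TaskFactory.get(
        new DDLWork(getInputs(), getOutputs(), showDesc), conf));
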
Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_create_db.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_create_db.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_create_db.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_create_db.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,5 @@
+set hive.security.authorization.enabled=true;
+
+create database db_to_fail;
+
+

Added: hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_drop_db.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_drop_db.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_drop_db.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/authorization_fail_drop_db.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,5 @@
+set hive.security.authorization.enabled=false;
+create database db_fail_to_drop;
+set hive.security.authorization.enabled=true;
+
+drop database db_fail_to_drop;

Added: hive/trunk/ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/lockneg_query_tbl_in_locked_db.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,17 @@
+create database lockneg1;
+use lockneg1;
+
+create table tstsrcpart like default.srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from default.srcpart where ds='2008-04-08' and hr='11';
+
+lock database lockneg1 shared;
+show locks database lockneg1;
+select count(1) from tstsrcpart where ds='2008-04-08' and hr='11';
+
+unlock database lockneg1;
+show locks database lockneg1;
+lock database lockneg1 exclusive;
+show locks database lockneg1;
+select count(1) from tstsrcpart where ds='2008-04-08' and hr='11';

Added: hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_db_lock_conflict.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,6 @@
+set hive.lock.numretries=0;
+
+create database lockneg4;
+
+lock database lockneg4 exclusive;
+lock database lockneg4 shared;

Added: hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_drop_locked_db.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,8 @@
+set hive.lock.numretries=0;
+
+create database lockneg9;
+
+lock database lockneg9 shared;
+show locks database lockneg9;
+
+drop database lockneg9;

Added: hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/lockneg_try_lock_db_in_use.q Fri Dec 13 00:12:01 2013
@@ -0,0 +1,15 @@
+set hive.lock.numretries=0;
+
+create database lockneg2;
+use lockneg2;
+
+create table tstsrcpart like default.srcpart;
+
+insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
+select key, value from default.srcpart where ds='2008-04-08' and hr='11';
+
+lock table tstsrcpart shared;
+show locks;
+
+lock database lockneg2 exclusive;
+show locks;

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_2.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_2.q.out Fri Dec 13 00:12:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
  POSTHOOK: query: create table authorization_fail_2 (key int, value string) partitioned by (ds string)
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@authorization_fail_2
-Authorization failed:No privilege 'Create' found for inputs { database:default, table:authorization_fail_2}. Use show grant to get more details.
+Authorization failed:No privilege 'Create' found for inputs { database:default, table:authorization_fail_2}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_3.q.out Fri Dec 13 00:12:01 2013
@@ -36,4 +36,4 @@ PREHOOK: query: show grant user hive_tes
  PREHOOK: type: SHOW_GRANT
  POSTHOOK: query: show grant user hive_test_user on table authorization_fail_3 partition (ds='2010')
  POSTHOOK: type: SHOW_GRANT
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_3, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_3, columnName:key}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_4.q.out Fri Dec 13 00:12:01 2013
@@ -76,4 +76,4 @@ principalType USER
  privilege Create
  #### A masked pattern was here ####
  grantor hive_test_user
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_4, partitionName:ds=2010, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail_4, partitionName:ds=2010, columnName:key}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_5.q.out Fri Dec 13 00:12:01 2013
@@ -127,4 +127,4 @@ principalType USER
  privilege Create
  #### A masked pattern was here ####
  grantor hive_test_user
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, partitionName:ds=2010, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, partitionName:ds=2010, columnName:key}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_6.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_6.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_6.q.out Fri Dec 13 00:12:01 2013
@@ -7,4 +7,4 @@ POSTHOOK: query: -- SORT_BEFORE_DIFF
  create table authorization_part_fail (key int, value string) partitioned by (ds string)
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@authorization_part_fail
-Authorization failed:No privilege 'Alter' found for inputs { database:default, table:authorization_part_fail}. Use show grant to get more details.
+Authorization failed:No privilege 'Alter' found for inputs { database:default, table:authorization_part_fail}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_7.q.out Fri Dec 13 00:12:01 2013
@@ -42,4 +42,4 @@ PREHOOK: query: drop role hive_test_role
  PREHOOK: type: DROPROLE
  POSTHOOK: query: drop role hive_test_role_fail
  POSTHOOK: type: DROPROLE
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_fail, columnName:key}. Use SHOW GRANT to get more details.

Added: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_create_db.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_create_db.q.out?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_create_db.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_create_db.q.out Fri Dec 13 00:12:01 2013
@@ -0,0 +1 @@
+Authorization failed:No privilege 'Create' found for outputs { }. Use SHOW GRANT to get more details.

Added: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out?rev=1550601&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_drop_db.q.out Fri Dec 13 00:12:01 2013
@@ -0,0 +1,5 @@
+PREHOOK: query: create database db_fail_to_drop
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database db_fail_to_drop
+POSTHOOK: type: CREATEDATABASE
+Authorization failed:No privilege 'Drop' found for outputs { database:db_fail_to_drop}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out Fri Dec 13 00:12:01 2013
@@ -274,4 +274,4 @@ POSTHOOK: Lineage: authorization_part_fa
  POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
  POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).key EXPRESSION [(src_auth)src_auth.FieldSchema(name:key, type:string, comment:null), ]
  POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2011).value SIMPLE [(src_auth)src_auth.FieldSchema(name:value, type:string, comment:null), ]
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_part_fail, partitionName:ds=2010, columnName:value}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:authorization_part_fail, partitionName:ds=2010, columnName:value}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/database_drop_does_not_exist.q.out Fri Dec 13 00:12:01 2013
@@ -3,7 +3,4 @@ PREHOOK: type: SHOWDATABASES
  POSTHOOK: query: SHOW DATABASES
  POSTHOOK: type: SHOWDATABASES
  default
-PREHOOK: query: -- Try to drop a database that does not exist
-DROP DATABASE does_not_exist
-PREHOOK: type: DROPDATABASE
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: does_not_exist
+FAILED: SemanticException [Error 10072]: Database does not exist: does_not_exist

Modified: hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty.q.out Fri Dec 13 00:12:01 2013
@@ -24,4 +24,6 @@ POSTHOOK: query: USE default
  POSTHOOK: type: SWITCHDATABASE
  PREHOOK: query: DROP DATABASE test_db
  PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:test_db
+PREHOOK: Output: database:test_db
  FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database test_db is not empty)

Modified: hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/database_drop_not_empty_restrict.q.out Fri Dec 13 00:12:01 2013
@@ -24,4 +24,6 @@ POSTHOOK: query: USE default
  POSTHOOK: type: SWITCHDATABASE
  PREHOOK: query: DROP DATABASE db_drop_non_empty_restrict
  PREHOOK: type: DROPDATABASE
+PREHOOK: Input: database:db_drop_non_empty_restrict
+PREHOOK: Output: database:db_drop_non_empty_restrict
  FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. InvalidOperationException(message:Database db_drop_non_empty_restrict is not empty)

Modified: hive/trunk/ql/src/test/results/clientnegative/exim_22_export_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/exim_22_export_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/exim_22_export_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/exim_22_export_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -4,4 +4,4 @@ POSTHOOK: query: create table exim_depar
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@exim_department
  #### A masked pattern was here ####
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:exim_department}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:exim_department}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/exim_23_import_exist_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -39,4 +39,4 @@ PREHOOK: type: CREATETABLE
  POSTHOOK: query: create table exim_department ( dep_id int) stored as textfile
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: importer@exim_department
-Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_department}. Use show grant to get more details.
+Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_department}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/exim_24_import_part_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -58,4 +58,4 @@ POSTHOOK: query: create table exim_emplo
   tblproperties("creator"="krishna")
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: importer@exim_employee
-Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_employee}. Use show grant to get more details.
+Authorization failed:No privilege 'Alter' found for outputs { database:importer, table:exim_employee}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/exim_25_import_nonexist_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -34,4 +34,4 @@ PREHOOK: query: use importer
  PREHOOK: type: SWITCHDATABASE
  POSTHOOK: query: use importer
  POSTHOOK: type: SWITCHDATABASE
-Authorization failed:No privilege 'Create' found for outputs { database:importer}. Use show grant to get more details.
+Authorization failed:No privilege 'Create' found for outputs { database:importer}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/join_nonexistent_part.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/join_nonexistent_part.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/join_nonexistent_part.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/join_nonexistent_part.q.out Fri Dec 13 00:12:01 2013
@@ -1 +1 @@
-Authorization failed:No privilege 'Select' found for inputs { database:default, table:src, columnName:key}. Use show grant to get more details.
+Authorization failed:No privilege 'Select' found for inputs { database:default, table:src, columnName:key}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/load_exist_part_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/load_exist_part_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/load_exist_part_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/load_exist_part_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -10,4 +10,4 @@ POSTHOOK: query: alter table hive_test_s
  POSTHOOK: type: ALTERTABLE_ADDPARTS
  POSTHOOK: Input: default@hive_test_src
  POSTHOOK: Output: default@hive_test_src@pcol1=test_part
-Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use show grant to get more details.
+Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/load_nonpart_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/load_nonpart_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/load_nonpart_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/load_nonpart_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
  POSTHOOK: query: create table hive_test_src ( col1 string ) stored as textfile
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@hive_test_src
-Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use show grant to get more details.
+Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details.

Modified: hive/trunk/ql/src/test/results/clientnegative/load_part_authfail.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/load_part_authfail.q.out?rev=1550601&r1=1550600&r2=1550601&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/load_part_authfail.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/load_part_authfail.q.out Fri Dec 13 00:12:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
  POSTHOOK: query: create table hive_test_src ( col1 string ) partitioned by (pcol1 string) stored as textfile
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@hive_test_src
-Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use show grant to get more details.
+Authorization failed:No privilege 'Update' found for outputs { database:default, table:hive_test_src}. Use SHOW GRANT to get more details.
