Author: thejas
Date: Fri Jul 18 18:19:14 2014
New Revision: 1611749

URL: http://svn.apache.org/r1611749
Log:
HIVE-7416 : provide context information to authorization checkPrivileges api call (Thejas Nair, reviewed by Jason Dere)

Added:
     hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java
Modified:
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
     hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
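
For orientation (this sketch is not part of the commit): with this change, Driver builds a HiveAuthzContext carrying the client type, user IP address, session id, and original command string, and passes it as a new fourth argument to HiveAuthorizer.checkPrivileges(). An authorization plugin could use that argument for audit logging roughly as follows; the package, class name, and helper method are hypothetical.

// Hypothetical helper, not part of this commit: shows how a plugin's
// checkPrivileges(hiveOpType, inputs, outputs, context) implementation
// could turn the new HiveAuthzContext argument into an audit log line.
package org.example.hive.authz;  // hypothetical package

import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;

public class AuthzAuditLogger {
  private static final Log LOG = LogFactory.getLog(AuthzAuditLogger.class);

  /**
   * Log one audit line per authorization check, using the context fields
   * introduced by this change. A plugin would call this from checkPrivileges().
   */
  public static void logCheck(HiveOperationType opType,
      List<HivePrivilegeObject> inputs, List<HivePrivilegeObject> outputs,
      HiveAuthzContext context) {
    StringBuilder msg = new StringBuilder("authz check: op=").append(opType)
        .append(", client=").append(context.getClientType())
        .append(", ip=").append(context.getIpAddress())
        .append(", session=").append(context.getSessionString())
        .append(", command=").append(context.getCommandString())
        .append(", inputs=").append(inputs)
        .append(", outputs=").append(outputs);
    LOG.info(msg.toString());
  }
}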

Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java?rev=1611749&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestHS2AuthzContext.java Fri Jul 18 18:19:14 2014
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.jdbc.authorization;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.verify;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Matchers;
+import org.mockito.Mockito;
+/**
+ * Test context information that gets passed to authorization api
+ */
+public class TestHS2AuthzContext {
+ private static MiniHS2 miniHS2 = null;
+ static HiveAuthorizer mockedAuthorizer;
+
+ /**
+ * This factory creates a mocked HiveAuthorizer class.
+ * Use the mocked class to capture the argument passed to it in the test case.
+ */
+ static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory {
+ @Override
+ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, HiveAuthenticationProvider authenticator) {
+ TestHS2AuthzContext.mockedAuthorizer = Mockito.mock(HiveAuthorizer.class);
+ return TestHS2AuthzContext.mockedAuthorizer;
+ }
+ }
+
+ @BeforeClass
+ public static void beforeTest() throws Exception {
+ Class.forName(MiniHS2.getJdbcDriverName());
+ HiveConf conf = new HiveConf();
+ conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
+ conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName());
+ conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+ conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+ conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+
+ miniHS2 = new MiniHS2(conf);
+ miniHS2.start(new HashMap<String, String>());
+ }
+
+ @AfterClass
+ public static void afterTest() throws Exception {
+ if (miniHS2.isStarted()) {
+ miniHS2.stop();
+ }
+ }
+
+ @Test
+ public void testAuthzContextContents() throws Exception {
+
+ Connection hs2Conn = getConnection("user1");
+ Statement stmt = hs2Conn.createStatement();
+
+ final String cmd = "show tables";
+ stmt.execute(cmd);
+ stmt.close();
+ hs2Conn.close();
+
+ ArgumentCaptor<HiveAuthzContext> contextCapturer = ArgumentCaptor
+ .forClass(HiveAuthzContext.class);
+
+ verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
+ Matchers.anyListOf(HivePrivilegeObject.class),
+ Matchers.anyListOf(HivePrivilegeObject.class), contextCapturer.capture());
+
+ HiveAuthzContext context = contextCapturer.getValue();
+
+ assertEquals("Command ", cmd, context.getCommandString());
+ assertTrue("ip address pattern check", context.getIpAddress().contains("."));
+ // ip address size check - check for something better than non zero
+ assertTrue("ip address size check", context.getIpAddress().length() > 7);
+ // session string is supposed to be unique, so its got to be of some reasonable size
+ assertTrue("session string size check", context.getSessionString().length() > 10);
+ assertEquals("Client type ", HiveAuthzContext.CLIENT_TYPE.HIVESERVER2, context.getClientType());
+ }
+
+ private Connection getConnection(String userName) throws SQLException {
+ return DriverManager.getConnection(miniHS2.getJdbcURL(), userName, "bar");
+ }
+
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Jul 18 18:19:14 2014
@@ -101,6 +101,8 @@ import org.apache.hadoop.hive.ql.process
  import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
  import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
  import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext.CLIENT_TYPE;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
@@ -445,7 +447,7 @@ public class Driver implements CommandPr

          try {
            perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
- doAuthorization(sem);
+ doAuthorization(sem, command);
          } catch (AuthorizationException authExp) {
            console.printError("Authorization failed:" + authExp.getMessage()
                + ". Use SHOW GRANT to get more details.");
@@ -483,15 +485,25 @@ public class Driver implements CommandPr
      }
    }

- public static void doAuthorization(BaseSemanticAnalyzer sem)
+ /**
+ * Do authorization using post semantic analysis information in the semantic analyzer.
+ * The original command is also passed so that the authorization interface can provide
+ * more useful information in logs.
+ * @param sem
+ * @param command
+ * @throws HiveException
+ * @throws AuthorizationException
+ */
+ public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
        throws HiveException, AuthorizationException {
      HashSet<ReadEntity> inputs = sem.getInputs();
      HashSet<WriteEntity> outputs = sem.getOutputs();
      SessionState ss = SessionState.get();
      HiveOperation op = ss.getHiveOperation();
      Hive db = sem.getDb();
+
      if (ss.isAuthorizationModeV2()) {
- doAuthorizationV2(ss, op, inputs, outputs);
+ doAuthorizationV2(ss, op, inputs, outputs, command);
        return;
      }
      if (op == null) {
@@ -672,11 +684,20 @@ public class Driver implements CommandPr
    }

    private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
- HashSet<WriteEntity> outputs) throws HiveException {
+ HashSet<WriteEntity> outputs, String command) throws HiveException {
+
+ HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
+
+ authzContextBuilder.setClientType(ss.isHiveServerQuery() ? CLIENT_TYPE.HIVESERVER2
+ : CLIENT_TYPE.HIVECLI);
+ authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
+ authzContextBuilder.setSessionString(ss.getSessionId());
+ authzContextBuilder.setCommandString(command);
+
      HiveOperationType hiveOpType = getHiveOperationType(op);
      List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
      List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs);
- ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs);
+ ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build());
      return;
    }


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Fri Jul 18 18:19:14 2014
@@ -32,8 +32,8 @@ import java.util.Collection;
  import java.util.Comparator;
  import java.util.HashMap;
  import java.util.HashSet;
-import java.util.List;
  import java.util.LinkedList;
+import java.util.List;
  import java.util.Map;
  import java.util.Map.Entry;
  import java.util.Set;
@@ -50,9 +50,9 @@ import org.apache.hadoop.hive.ql.optimiz
  import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
  import org.apache.hadoop.hive.ql.plan.Explain;
  import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.plan.TezWork;
  import org.apache.hadoop.hive.ql.plan.HiveOperation;
  import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.TezWork;
  import org.apache.hadoop.hive.ql.plan.api.StageType;
  import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
  import org.apache.hadoop.hive.ql.session.SessionState;
@@ -69,7 +69,7 @@ import org.json.JSONObject;
  public class ExplainTask extends Task<ExplainWork> implements Serializable {
    private static final long serialVersionUID = 1L;
    public static final String EXPL_COLUMN_NAME = "Explain";
- private Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
+ private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
    private boolean isLogical = false;

    public ExplainTask() {
@@ -167,7 +167,7 @@ public class ExplainTask extends Task<Ex

    public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<?>> tasks, Task<?> fetchTask,
        boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception {
-
+
      // If the user asked for a formatted output, dump the json output
      // in the output stream
      JSONObject outJSONObject = new JSONObject();
@@ -335,11 +335,9 @@ public class ExplainTask extends Task<Ex
      }

      final List<String> exceptions = new ArrayList<String>();
-
      Object delegate = SessionState.get().getActiveAuthorizer();
      if (delegate != null) {
        Class itface = SessionState.get().getAuthorizerInterface();
-
        Object authorizer = AuthorizationFactory.create(delegate, itface,
            new AuthorizationFactory.AuthorizationExceptionHandler() {
              public void exception(Exception exception) {
@@ -349,7 +347,7 @@ public class ExplainTask extends Task<Ex

        SessionState.get().setActiveAuthorizer(authorizer);
        try {
- Driver.doAuthorization(analyzer);
+ Driver.doAuthorization(analyzer, "");
        } finally {
          SessionState.get().setActiveAuthorizer(delegate);
        }
@@ -399,7 +397,7 @@ public class ExplainTask extends Task<Ex
          }
        }
        else if (ent.getValue() instanceof List) {
- if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty()
+ if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty()
              && ((List<?>)ent.getValue()).get(0) != null &&
              ((List<?>)ent.getValue()).get(0) instanceof TezWork.Dependency) {
            if (out != null) {
@@ -908,6 +906,7 @@ public class ExplainTask extends Task<Ex
     *
     */
    public class MethodComparator implements Comparator<Method> {
+ @Override
      public int compare(Method m1, Method m2) {
        return m1.getName().compareTo(m2.getName());
      }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java Fri Jul 18 18:19:14 2014
@@ -37,10 +37,11 @@ public interface HiveAuthorizationValida
     * @param hiveOpType
     * @param inputHObjs
     * @param outputHObjs
+ * @param context
     * @throws HiveAuthzPluginException
     * @throws HiveAccessControlException
     */
    void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
- List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException;
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;

  }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java Fri Jul 18 18:19:14 2014
@@ -146,11 +146,12 @@ public interface HiveAuthorizer {
     * @param hiveOpType
     * @param inputsHObjs
     * @param outputHObjs
+ * @param context
     * @throws HiveAuthzPluginException
     * @throws HiveAccessControlException
     */
    void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputsHObjs,
- List<HivePrivilegeObject> outputHObjs)
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
        throws HiveAuthzPluginException, HiveAccessControlException;

    /**

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java Fri Jul 18 18:19:14 2014
@@ -80,8 +80,9 @@ public class HiveAuthorizerImpl implemen

    @Override
    public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
- List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, HiveAccessControlException {
- authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs);
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
    }

    @Override

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java?rev=1611749&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzContext.java Fri Jul 18 18:19:14 2014
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+
+/**
+ * Provides context information for the authorization check call that can be used for
+ * auditing and/or authorization.
+ * It is an immutable class; the Builder inner class is used to instantiate it.
+ */
+@LimitedPrivate(value = { "" })
+@Evolving
+public final class HiveAuthzContext {
+
+ public enum CLIENT_TYPE {
+ HIVESERVER2, HIVECLI
+ };
+
+ public static class Builder {
+ private String userIpAddress;
+ private String sessionString;
+ private CLIENT_TYPE clientType;
+ private String commandString;
+
+ /**
+ * Get user's ip address. This is set only if the authorization
+ * api is invoked from a HiveServer2 instance in standalone mode.
+ * @return ip address
+ */
+ public String getUserIpAddress() {
+ return userIpAddress;
+ }
+ public void setUserIpAddress(String userIpAddress) {
+ this.userIpAddress = userIpAddress;
+ }
+ public String getSessionString() {
+ return sessionString;
+ }
+ public void setSessionString(String sessionString) {
+ this.sessionString = sessionString;
+ }
+ public CLIENT_TYPE getClientType() {
+ return clientType;
+ }
+ public void setClientType(CLIENT_TYPE clientType) {
+ this.clientType = clientType;
+ }
+ public String getCommandString() {
+ return commandString;
+ }
+ public void setCommandString(String commandString) {
+ this.commandString = commandString;
+ }
+ public HiveAuthzContext build(){
+ return new HiveAuthzContext(this);
+ }
+
+
+ }
+
+ private final String userIpAddress;
+ private final String sessionString;
+ private final CLIENT_TYPE clientType;
+ private final String commandString;
+
+ private HiveAuthzContext(Builder builder) {
+ this.userIpAddress = builder.userIpAddress;
+ this.sessionString = builder.sessionString;
+ this.clientType = builder.clientType;
+ this.commandString = builder.commandString;
+
+ }
+
+ public String getIpAddress() {
+ return userIpAddress;
+ }
+
+ public String getSessionString() {
+ return sessionString;
+ }
+
+ public CLIENT_TYPE getClientType() {
+ return clientType;
+ }
+
+ public String getCommandString() {
+ return commandString;
+ }
+
+ @Override
+ public String toString() {
+ return "HiveAuthzContext [userIpAddress=" + userIpAddress + ", sessionString=" + sessionString
+ + ", clientType=" + clientType + ", commandString=" + commandString + "]";
+ }
+
+}
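
As a usage note (not part of the commit): the Builder is the only way to instantiate the immutable HiveAuthzContext, mirroring what Driver.doAuthorizationV2() does in the Driver.java diff earlier in this message. A minimal sketch, with hypothetical values and a hypothetical example class name:

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext.CLIENT_TYPE;

public class HiveAuthzContextBuilderExample {  // hypothetical example class
  public static HiveAuthzContext exampleContext() {
    HiveAuthzContext.Builder builder = new HiveAuthzContext.Builder();
    builder.setClientType(CLIENT_TYPE.HIVESERVER2);  // or CLIENT_TYPE.HIVECLI for the CLI
    builder.setUserIpAddress("192.168.1.10");        // hypothetical client address
    builder.setSessionString("hive-session-0001");   // hypothetical session id
    builder.setCommandString("show tables");
    return builder.build();
  }
}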

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Fri Jul 18 18:19:14 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.metastore.
  import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
  import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
@@ -56,13 +57,13 @@ public class SQLStdHiveAuthorizationVali

    @Override
    public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
- List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException,
- HiveAccessControlException {
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+ throws HiveAuthzPluginException, HiveAccessControlException {

      if (LOG.isDebugEnabled()) {
        String msg = "Checking privileges for operation " + hiveOpType + " by user "
            + authenticator.getUserName() + " on " + " input objects " + inputHObjs
- + " and output objects " + outputHObjs;
+ + " and output objects " + outputHObjs + ". Context Info: " + context;
        LOG.debug(msg);
      }


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Fri Jul 18 18:19:14 2014
@@ -19,8 +19,6 @@
  package org.apache.hadoop.hive.ql.session;
  import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;

-import com.google.common.base.Preconditions;
-
  import java.io.File;
  import java.io.IOException;
  import java.io.InputStream;
@@ -69,6 +67,8 @@ import org.apache.hadoop.hive.ql.util.Do
  import org.apache.hadoop.hive.shims.ShimLoader;
  import org.apache.hadoop.util.ReflectionUtils;

+import com.google.common.base.Preconditions;
+
  /**
   * SessionState encapsulates common data associated with a session.
   *
@@ -83,7 +83,7 @@ public class SessionState {
    private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path";
    private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path";
    private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space";
- private Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();
+ private final Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();

    protected ClassLoader parentLoader;

@@ -173,6 +173,8 @@ public class SessionState {
    private final String CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER =
        "hive.internal.ss.authz.settings.applied.marker";

+ private String userIpAddress;
+
    /**
     * Lineage state.
     */
@@ -466,7 +468,7 @@ public class SessionState {
      FileSystem fs = p.getFileSystem(conf);
      p = new Path(fs.makeQualified(p).toString());
      FsPermission fsPermission = new FsPermission(Short.parseShort(perm.trim(), 8));
-
+
      if (!Utilities.createDirsWithPermission(conf, p, fsPermission)) {
        throw new IOException("Cannot create directory: "
                              + p.toString());
@@ -475,7 +477,7 @@ public class SessionState {
      // best effort to clean up if we don't shut down properly
      fs.deleteOnExit(p);
    }
-
+

    /**
     * Setup authentication and authorization plugins for this session.
@@ -1135,4 +1137,20 @@ public class SessionState {
    public Map<String, Map<String, Table>> getTempTables() {
      return tempTables;
    }
+
+ /**
+ * @return ip address for user running the query
+ */
+ public String getUserIpAddress() {
+ return userIpAddress;
+ }
+
+ /**
+ * set the ip address for user running the query
+ * @param userIpAddress
+ */
+ public void setUserIpAddress(String userIpAddress) {
+ this.userIpAddress = userIpAddress;
+ }
+
  }

Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java?rev=1611749&r1=1611748&r2=1611749&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java (original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java Fri Jul 18 18:19:14 2014
@@ -98,6 +98,7 @@ public class HiveSessionImpl implements
      hiveConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue());

      sessionState = new SessionState(hiveConf, username);
+ sessionState.setUserIpAddress(ipAddress);
      sessionState.setIsHiveServerQuery(true);
      SessionState.start(sessionState);
