FAQ
Author: vgumashta
Date: Thu Apr 9 18:25:53 2015
New Revision: 1672443

URL: http://svn.apache.org/r1672443
Log:
HIVE-9709: Hive should support replaying cookie from JDBC driver for beeline (Hari Sankar Sivarama Subramaniyan reviewed by Vaibhav Gumashta)

Modified:
     hive/trunk/bin/beeline
     hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
     hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
     hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
     hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
     hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
     hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
     hive/trunk/pom.xml

Modified: hive/trunk/bin/beeline
URL: http://svn.apache.org/viewvc/hive/trunk/bin/beeline?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/bin/beeline (original)
+++ hive/trunk/bin/beeline Thu Apr 9 18:25:53 2015
@@ -18,4 +18,8 @@
  bin=`dirname "$0"`
  bin=`cd "$bin"; pwd`

+# Set Hadoop User classpath to true so that httpclient jars are taken from
+# hive lib instead of hadoop lib.
+export HADOOP_USER_CLASSPATH_FIRST=true
+
  . "$bin"/hive --service beeline "$@"

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestSSL.java Thu Apr 9 18:25:53 2015
@@ -155,7 +155,7 @@ public class TestSSL {
          cause = cause.getCause();
        }
        Assert.assertEquals("org.apache.http.NoHttpResponseException", cause.getClass().getName());
- Assert.assertEquals("The target server failed to respond", cause.getMessage());
+ Assert.assertTrue(cause.getMessage().contains("failed to respond"));
      }
      miniHS2.stop();
    }

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIService.java Thu Apr 9 18:25:53 2015
@@ -160,7 +160,7 @@ public class TestThriftHttpCLIService ex
      String httpUrl = transportMode + "://" + host + ":" + port +
          "/" + thriftHttpPath + "/";
      httpClient.addRequestInterceptor(
- new HttpBasicAuthInterceptor(USERNAME, PASSWORD));
+ new HttpBasicAuthInterceptor(USERNAME, PASSWORD, null, null));
      return new THttpClient(httpUrl, httpClient);
    }


Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java Thu Apr 9 18:25:53 2015
@@ -50,10 +50,11 @@ import java.util.concurrent.TimeUnit;
  import javax.security.sasl.Sasl;
  import javax.security.sasl.SaslException;

+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.protocol.HttpContext;
  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
  import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.ShimLoader;
  import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
  import org.apache.hive.service.auth.HiveAuthFactory;
  import org.apache.hive.service.auth.KerberosSaslHelper;
@@ -73,9 +74,17 @@ import org.apache.hive.service.cli.thrif
  import org.apache.hive.service.cli.thrift.TRenewDelegationTokenResp;
  import org.apache.hive.service.cli.thrift.TSessionHandle;
  import org.apache.http.HttpRequestInterceptor;
-import org.apache.http.conn.scheme.Scheme;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.ServiceUnavailableRetryStrategy;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
  import org.apache.http.conn.ssl.SSLSocketFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
  import org.apache.thrift.TException;
  import org.apache.thrift.protocol.TBinaryProtocol;
  import org.apache.thrift.transport.THttpClient;
@@ -236,7 +245,7 @@ public class HiveConnection implements j
    }

    private TTransport createHttpTransport() throws SQLException, TTransportException {
- DefaultHttpClient httpClient;
+ CloseableHttpClient httpClient;
      boolean useSsl = isSslConnection();
      // Create an http client from the configs
      httpClient = getHttpClient(useSsl);
@@ -260,35 +269,76 @@ public class HiveConnection implements j
      return transport;
    }

- private DefaultHttpClient getHttpClient(Boolean useSsl) throws SQLException {
- DefaultHttpClient httpClient = new DefaultHttpClient();
+ private CloseableHttpClient getHttpClient(Boolean useSsl) throws SQLException {
+ boolean isCookieEnabled = sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH) == null ||
+ (!JdbcConnectionParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
+ sessConfMap.get(JdbcConnectionParams.COOKIE_AUTH)));
+ String cookieName = sessConfMap.get(JdbcConnectionParams.COOKIE_NAME) == null ?
+ JdbcConnectionParams.DEFAULT_COOKIE_NAMES_HS2 :
+ sessConfMap.get(JdbcConnectionParams.COOKIE_NAME);
+ CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
+ HttpClientBuilder httpClientBuilder;
      // Request interceptor for any request pre-processing logic
      HttpRequestInterceptor requestInterceptor;
- // If Kerberos
+
+ // Configure http client for kerberos/password based authentication
      if (isKerberosAuthMode()) {
        /**
         * Add an interceptor which sets the appropriate header in the request.
         * It does the kerberos authentication and get the final service ticket,
         * for sending to the server before every request.
         * In https mode, the entire information is encrypted
- * TODO: Optimize this with a mix of kerberos + using cookie.
         */
        requestInterceptor =
            new HttpKerberosRequestInterceptor(sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL),
- host, getServerHttpUrl(useSsl), assumeSubject);
+ host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, cookieName);
      }
      else {
        /**
         * Add an interceptor to pass username/password in the header.
         * In https mode, the entire information is encrypted
         */
- requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword());
+ requestInterceptor = new HttpBasicAuthInterceptor(getUserName(), getPassword(),
+ cookieStore, cookieName);
+ }
+ // Configure http client for cookie based authentication
+ if (isCookieEnabled) {
+ // Create a http client with a retry mechanism when the server returns a status code of 401.
+ httpClientBuilder =
+ HttpClients.custom().setServiceUnavailableRetryStrategy(
+ new ServiceUnavailableRetryStrategy() {
+
+ @Override
+ public boolean retryRequest(
+ final HttpResponse response,
+ final int executionCount,
+ final HttpContext context) {
+ int statusCode = response.getStatusLine().getStatusCode();
+ boolean ret = statusCode == 401 && executionCount <= 1;
+
+ // Set the context attribute to true which will be interpreted by the request interceptor
+ if (ret) {
+ context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_TRUE);
+ }
+ return ret;
+ }
+
+ @Override
+ public long getRetryInterval() {
+ // Immediate retry
+ return 0;
+ }
+ });
+ } else {
+ httpClientBuilder = HttpClientBuilder.create();
      }
- // Configure httpClient for SSL
+ // Add the request interceptor to the client builder
+ httpClientBuilder.addInterceptorFirst(requestInterceptor);
+ // Configure http client for SSL
      if (useSsl) {
        String sslTrustStorePath = sessConfMap.get(JdbcConnectionParams.SSL_TRUST_STORE);
        String sslTrustStorePassword = sessConfMap.get(
- JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
+ JdbcConnectionParams.SSL_TRUST_STORE_PASSWORD);
        KeyStore sslTrustStore;
        SSLSocketFactory socketFactory;
        /**
@@ -312,21 +362,25 @@ public class HiveConnection implements j
            // Pick trust store config from the given path
            sslTrustStore = KeyStore.getInstance(JdbcConnectionParams.SSL_TRUST_STORE_TYPE);
            sslTrustStore.load(new FileInputStream(sslTrustStorePath),
- sslTrustStorePassword.toCharArray());
+ sslTrustStorePassword.toCharArray());
            socketFactory = new SSLSocketFactory(sslTrustStore);
          }
          socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
- Scheme sslScheme = new Scheme("https", 443, socketFactory);
- httpClient.getConnectionManager().getSchemeRegistry().register(sslScheme);
+
+ final Registry<ConnectionSocketFactory> registry =
+ RegistryBuilder.<ConnectionSocketFactory>create()
+ .register("https", socketFactory)
+ .build();
+
+ httpClientBuilder.setConnectionManager(new BasicHttpClientConnectionManager(registry));
        }
        catch (Exception e) {
          String msg = "Could not create an https connection to " +
- jdbcUriString + ". " + e.getMessage();
+ jdbcUriString + ". " + e.getMessage();
          throw new SQLException(msg, " 08S01", e);
        }
      }
- httpClient.addRequestInterceptor(requestInterceptor);
- return httpClient;
+ return httpClientBuilder.build();
    }

    /**

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java Thu Apr 9 18:25:53 2015
@@ -25,6 +25,8 @@ import org.apache.http.HttpException;
  import org.apache.http.HttpRequest;
  import org.apache.http.HttpRequestInterceptor;
  import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
  import org.apache.http.impl.auth.AuthSchemeBase;
  import org.apache.http.impl.auth.BasicScheme;
  import org.apache.http.protocol.HttpContext;
@@ -37,20 +39,42 @@ import org.apache.http.protocol.HttpCont
  public class HttpBasicAuthInterceptor implements HttpRequestInterceptor {
    UsernamePasswordCredentials credentials;
    AuthSchemeBase authScheme;
+ CookieStore cookieStore;
+ boolean isCookieEnabled;
+ String cookieName;

- public HttpBasicAuthInterceptor(String username, String password) {
+ public HttpBasicAuthInterceptor(String username, String password, CookieStore cookieStore,
+ String cn) {
      if(username != null){
        credentials = new UsernamePasswordCredentials(username, password);
      }
      authScheme = new BasicScheme();
+ this.cookieStore = cookieStore;
+ isCookieEnabled = (cookieStore != null);
+ cookieName = cn;
    }

    @Override
    public void process(HttpRequest httpRequest, HttpContext httpContext)
        throws HttpException, IOException {
- Header basicAuthHeader = authScheme.authenticate(
- credentials, httpRequest, httpContext);
- httpRequest.addHeader(basicAuthHeader);
+ if (isCookieEnabled) {
+ httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+ }
+ // Add the authentication details under the following scenarios:
+ // 1. Cookie Authentication is disabled OR
+ // 2. The first time when the request is sent OR
+ // 3. The server returns a 401, which sometimes means the cookie has expired
+ if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+ (cookieStore == null || (cookieStore != null &&
+ Utils.needToSendCredentials(cookieStore, cookieName)))) ||
+ (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+ httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).
+ equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) {
+ Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, httpContext);
+ httpRequest.addHeader(basicAuthHeader);
+ }
+ if (isCookieEnabled) {
+ httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
+ }
    }
-
  }

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java Thu Apr 9 18:25:53 2015
@@ -25,6 +25,8 @@ import org.apache.hive.service.auth.Http
  import org.apache.http.HttpException;
  import org.apache.http.HttpRequest;
  import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
  import org.apache.http.protocol.HttpContext;

  /**
@@ -40,31 +42,59 @@ public class HttpKerberosRequestIntercep
    String host;
    String serverHttpUrl;
    boolean assumeSubject;
+ CookieStore cookieStore;
+ boolean isCookieEnabled;
+ String cookieName;

    // A fair reentrant lock
    private static ReentrantLock kerberosLock = new ReentrantLock(true);

    public HttpKerberosRequestInterceptor(String principal, String host,
- String serverHttpUrl, boolean assumeSubject) {
+ String serverHttpUrl, boolean assumeSubject, CookieStore cs, String cn) {
      this.principal = principal;
      this.host = host;
      this.serverHttpUrl = serverHttpUrl;
      this.assumeSubject = assumeSubject;
+ this.cookieStore = cs;
+ isCookieEnabled = (cs != null);
+ cookieName = cn;
    }

    @Override
    public void process(HttpRequest httpRequest, HttpContext httpContext)
        throws HttpException, IOException {
      String kerberosAuthHeader;
+
      try {
        // Generate the service ticket for sending to the server.
        // Locking ensures the tokens are unique in case of concurrent requests
        kerberosLock.lock();
- kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
- principal, host, serverHttpUrl, assumeSubject);
- // Set the session key token (Base64 encoded) in the headers
- httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
- HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+ // If cookie based authentication is allowed, generate ticket only when necessary.
+ // The necessary condition is either when there are no server side cookies in the
+ // cookie store which can be sent back or when the server returns a 401 error code
+ // indicating that the previous cookie has expired.
+ if (isCookieEnabled) {
+ httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+ }
+ // Generate the kerberos ticket under the following scenarios:
+ // 1. Cookie Authentication is disabled OR
+ // 2. The first time when the request is sent OR
+ // 3. The server returns a 401, which sometimes means the cookie has expired
+ if (!isCookieEnabled || ((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+ (cookieStore == null || (cookieStore != null &&
+ Utils.needToSendCredentials(cookieStore, cookieName)))) ||
+ (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+ httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).
+ equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) {
+ kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
+ principal, host, serverHttpUrl, assumeSubject);
+ // Set the session key token (Base64 encoded) in the headers
+ httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
+ HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+ }
+ if (isCookieEnabled) {
+ httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, Utils.HIVE_SERVER2_RETRY_FALSE);
+ }
      } catch (Exception e) {
        throw new HttpException(e.getMessage(), e);
      }

Modified: hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java (original)
+++ hive/trunk/jdbc/src/java/org/apache/hive/jdbc/Utils.java Thu Apr 9 18:25:53 2015
@@ -34,6 +34,8 @@ import org.apache.commons.logging.LogFac
  import org.apache.hive.service.cli.HiveSQLException;
  import org.apache.hive.service.cli.thrift.TStatus;
  import org.apache.hive.service.cli.thrift.TStatusCode;
+import org.apache.http.client.CookieStore;
+import org.apache.http.cookie.Cookie;

  public class Utils {
    public static final Log LOG = LogFactory.getLog(Utils.class.getName());
@@ -56,6 +58,11 @@ public class Utils {

    private static final String URI_HIVE_PREFIX = "hive2:";

+ // This value is set to true by the setServiceUnavailableRetryStrategy() when the server returns 401
+ static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver";
+ static final String HIVE_SERVER2_RETRY_TRUE = "true";
+ static final String HIVE_SERVER2_RETRY_FALSE = "false";
+
    public static class JdbcConnectionParams {
      // Note on client side parameter naming convention:
      // Prefer using a shorter camelCase param name instead of using the same name as the
@@ -98,6 +105,11 @@ public class Utils {
      // Default namespace value on ZooKeeper.
      // This value is used if the param "zooKeeperNamespace" is not specified in the JDBC Uri.
      static final String ZOOKEEPER_DEFAULT_NAMESPACE = "hiveserver2";
+ static final String COOKIE_AUTH = "cookieAuth";
+ static final String COOKIE_AUTH_FALSE = "false";
+ static final String COOKIE_NAME = "cookieName";
+ // The default value of the cookie name when CookieAuth=true
+ static final String DEFAULT_COOKIE_NAMES_HS2 = "hive.server2.auth";

      // Non-configurable params:
      // Currently supports JKS keystore format
@@ -560,4 +572,28 @@ public class Utils {
      }
      return version;
    }
+
+ /**
+ * The function iterates through the list of cookies in the cookiestore and tries to
+ * match them with the cookieName. If there is a match, the cookieStore already
+ * has a valid cookie and the client need not send Credentials for validation purpose.
+ * @param cookieStore The cookie Store
+ * @param cookieName Name of the cookie which needs to be validated
+ * @return true or false based on whether the client needs to send the credentials or
+ * not to the server.
+ */
+ static boolean needToSendCredentials(CookieStore cookieStore, String cookieName) {
+ if (cookieName == null || cookieStore == null) {
+ return true;
+ }
+
+ List<Cookie> cookies = cookieStore.getCookies();
+
+ for (Cookie c : cookies) {
+ if (c.getName().equals(cookieName)) {
+ return false;
+ }
+ }
+ return true;
+ }
  }

Modified: hive/trunk/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1672443&r1=1672442&r2=1672443&view=diff
==============================================================================
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Thu Apr 9 18:25:53 2015
@@ -124,8 +124,8 @@
      <hbase.hadoop1.version>0.98.9-hadoop1</hbase.hadoop1.version>
      <hbase.hadoop2.version>0.98.9-hadoop2</hbase.hadoop2.version>
      <!-- httpcomponents are not always in version sync -->
- <httpcomponents.client.version>4.2.5</httpcomponents.client.version>
- <httpcomponents.core.version>4.2.5</httpcomponents.core.version>
+ <httpcomponents.client.version>4.4</httpcomponents.client.version>
+ <httpcomponents.core.version>4.4</httpcomponents.core.version>
      <ivy.version>2.4.0</ivy.version>
      <jackson.version>1.9.2</jackson.version>
      <javaewah.version>0.3.2</javaewah.version>
@@ -1083,6 +1083,16 @@
              <groupId>org.apache.hadoop</groupId>
              <artifactId>hadoop-common</artifactId>
              <version>${hadoop-23.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpcore</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ </exclusion>
+ </exclusions>
            </dependency>
            <dependency>
              <groupId>org.apache.hadoop</groupId>

Search Discussions

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 1 of 1 | next ›
Discussion Overview
groupcommits @
categorieshive, hadoop
postedApr 9, '15 at 6:25p
activeApr 9, '15 at 6:25p
posts1
users1
websitehive.apache.org

1 user in discussion

Vgumashta: 1 post

People

Translate

site design / logo © 2021 Grokbase