Repository: hive
Updated Branches:
   refs/heads/master 6ad405fc8 -> ab1f405d1


Revert "HIVE-12481: Occasionally "Request is a replay" will be thrown from HS2 (Aihua Xu, reviewed by Yongzhi Chen)"

This reverts commit 361d72d963053eafe0ee9f6c6e24d127708def02.


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ab1f405d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ab1f405d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ab1f405d

Branch: refs/heads/master
Commit: ab1f405d192f65f7443bb08785a35f906c55fcfe
Parents: 6ad405f
Author: Aihua Xu <aihuaxu@apache.org>
Authored: Wed Jan 13 14:17:56 2016 -0500
Committer: Aihua Xu <aihuaxu@apache.org>
Committed: Wed Jan 13 14:17:56 2016 -0500

----------------------------------------------------------------------
  .../org/apache/hadoop/hive/conf/HiveConf.java      |  4 --
  .../hive/thrift/TestHadoopAuthBridge23.java        | 54 +-------------------
  .../hadoop/hive/metastore/HiveMetaStore.java       |  3 +-
  .../hive/service/auth/HiveAuthFactory.java         |  3 +-
  .../hive/thrift/HadoopThriftAuthBridge.java        | 33 ++----------
  5 files changed, 9 insertions(+), 88 deletions(-)
----------------------------------------------------------------------
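
For context on what is being rolled back: the reverted change (HIVE-12481) wrapped TUGIAssumingTransportFactory.getTransport() in a retry loop so that a transient Kerberos "Request is a replay" failure would be retried up to hive.server2.thrift.auth.max.retries times, with a short random sleep between attempts. The standalone Java sketch below mirrors that retry-with-random-back-off pattern, reconstructed from the removed hunk in HadoopThriftAuthBridge.java further down. The class and method names (RetryOnReplaySketch, attemptWithRetries) are illustrative only and are not part of Hive, and the retryable-cause check is generalized here; the reverted code specifically retried when the RuntimeException wrapped a TTransportException.

import java.util.Random;
import java.util.function.Supplier;

public class RetryOnReplaySketch {

  /**
   * Retries the supplied action up to maxAttempts times when it fails with a
   * RuntimeException whose cause is of the given type, sleeping a random
   * 0-999 ms between attempts; any other failure is rethrown immediately.
   */
  static <T> T attemptWithRetries(Supplier<T> action, int maxAttempts,
                                  Class<? extends Throwable> retryableCause) {
    int attempts = 0;
    Random rand = new Random();
    while (true) {
      try {
        return action.get();
      } catch (RuntimeException e) {
        if (retryableCause.isInstance(e.getCause()) && ++attempts < maxAttempts) {
          try {
            Thread.sleep(rand.nextInt(1000)); // brief random back-off, as in the removed loop
          } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
          }
          continue;
        }
        throw e; // cause is not retryable, or retries are exhausted
      }
    }
  }

  public static void main(String[] args) {
    // Usage: fail twice with a retryable cause, then succeed on the third attempt.
    int[] calls = {0};
    String result = attemptWithRetries(() -> {
      if (++calls[0] < 3) {
        throw new RuntimeException(new IllegalStateException("simulated 'Request is a replay'"));
      }
      return "transport opened";
    }, 3, IllegalStateException.class);
    System.out.println(result + " after " + calls[0] + " attempts");
  }
}

In the reverted code this loop lived inside ugi.doAs(...); the revert restores the original single wrapped.getTransport(trans) call, as the final hunk below shows.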


http://git-wip-us.apache.org/repos/asf/hive/blob/ab1f405d/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 1bcdc5f..081c1fe 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1951,10 +1951,6 @@ public class HiveConf extends Configuration {
          "HttpOnly attribute of the HS2 generated cookie."),

      // binary transport settings
-    HIVE_SERVER2_THRIFT_AUTH_MAX_RETRIES("hive.server2.thrift.auth.max.retries", 1,
-        "Number of maximum retries to authenticate HS2 server or HMS server against Kerberos service.\n"
-        + "This is to mitigate some false alarm auth issues, such that concurrent query executions\n"
-        + "against single HS2 server may fail to authenticate due to 'Request is a replay'."),
      HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000,
          "Port number of HiveServer2 Thrift interface when hive.server2.transport.mode is 'binary'."),
      HIVE_SERVER2_THRIFT_SASL_QOP("hive.server2.thrift.sasl.qop", "auth",

http://git-wip-us.apache.org/repos/asf/hive/blob/ab1f405d/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
index 64fa0ec..6d0776a 100644
--- a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
+++ b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hive.thrift;


  import junit.framework.TestCase;
-
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hive.conf.HiveConf;
@@ -42,12 +41,8 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecret
  import org.apache.hadoop.security.token.delegation.DelegationKey;
  import org.apache.hadoop.util.StringUtils;
  import org.apache.thrift.transport.TSaslServerTransport;
-import org.apache.thrift.transport.TTransport;
  import org.apache.thrift.transport.TTransportException;
  import org.apache.thrift.transport.TTransportFactory;
-import org.junit.Test;
-import org.mockito.Mockito;
-import static org.mockito.Mockito.*;

  import java.io.ByteArrayInputStream;
  import java.io.DataInputStream;
@@ -83,7 +78,7 @@ public class TestHadoopAuthBridge23 extends TestCase {
          super();
        }
        @Override
-      public TTransportFactory createTransportFactory(Map<String, String> saslProps, int authMaxRetries)
+      public TTransportFactory createTransportFactory(Map<String, String> saslProps)
        throws TTransportException {
          TSaslServerTransport.Factory transFactory =
            new TSaslServerTransport.Factory();
@@ -92,7 +87,7 @@ public class TestHadoopAuthBridge23 extends TestCase {
              saslProps,
              new SaslDigestCallbackHandler(secretManager));

-        return new TUGIAssumingTransportFactory(transFactory, realUgi, authMaxRetries);
+        return new TUGIAssumingTransportFactory(transFactory, realUgi);
        }
        static DelegationTokenStore TOKEN_STORE = new MemoryTokenStore();

@@ -235,51 +230,6 @@ public class TestHadoopAuthBridge23 extends TestCase {
      obtainTokenAndAddIntoUGI(clientUgi, "tokenForFooTablePartition");
    }

-  /**
-   * Verifies that the expected result returned after 2 unsuccessful retries
-   * @throws Exception
-   */
-  @Test
-  public void testRetryGetTransport() throws Exception {
-    TTransport inputTransport = Mockito.mock(TTransport.class);
-    TTransport expectedTransport = Mockito.mock(TTransport.class);
-    TTransportFactory mockWrapped = Mockito.mock(TTransportFactory.class);
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    when(mockWrapped.getTransport(any(TTransport.class)))
-        .thenThrow(new RuntimeException(new TTransportException()))
-        .thenThrow(new RuntimeException(new TTransportException()))
-        .thenReturn(expectedTransport);
-
-    TTransportFactory factory = new HadoopThriftAuthBridge.Server.TUGIAssumingTransportFactory(mockWrapped, ugi, 3);
-    TTransport transport = factory.getTransport(inputTransport);
-
-    assertEquals(expectedTransport, transport);
-    verify(mockWrapped, times(3)).getTransport(any(TTransport.class));
-  }
-
-  /**
-   * Verifies exception is thrown after 3 unsuccessful retries
-   * @throws Exception
-   */
-  @Test
-  public void testRetryGetTransport2() throws Exception {
-    Exception expectedException = new RuntimeException(new TTransportException());
-    TTransport inputTransport = Mockito.mock(TTransport.class);
-    TTransportFactory mockWrapped = Mockito.mock(TTransportFactory.class);
-    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    when(mockWrapped.getTransport(any(TTransport.class)))
-        .thenThrow(expectedException);
-
-    try {
-      TTransportFactory factory = new HadoopThriftAuthBridge.Server.TUGIAssumingTransportFactory(mockWrapped, ugi, 3);
-      factory.getTransport(inputTransport);
-    } catch(Exception e) {
-      assertEquals(expectedException, e);
-    } finally {
-      verify(mockWrapped, times(3)).getTransport(any(TTransport.class));
-    }
-  }
-
    public void testMetastoreProxyUser() throws Exception {
      setup();


http://git-wip-us.apache.org/repos/asf/hive/blob/ab1f405d/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 9ea9375..ace644b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -6189,9 +6189,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
              conf.getVar(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL));
          // start delegation token manager
          saslServer.startDelegationTokenSecretManager(conf, baseHandler, ServerMode.METASTORE);
-        int authMaxRetries = conf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_AUTH_MAX_RETRIES);
          transFactory = saslServer.createTransportFactory(
-            MetaStoreUtils.getMetaStoreSaslProperties(conf), authMaxRetries);
+            MetaStoreUtils.getMetaStoreSaslProperties(conf));
          processor = saslServer.wrapProcessor(
            new ThriftHiveMetastore.Processor<IHMSHandler>(handler));
          LOG.info("Starting DB backed MetaStore Server in Secure Mode");

http://git-wip-us.apache.org/repos/asf/hive/blob/ab1f405d/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
index 0c7c9ee..3471f12 100644
--- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
+++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java
@@ -140,8 +140,7 @@ public class HiveAuthFactory {
      TTransportFactory transportFactory;
      if (authTypeStr.equalsIgnoreCase(AuthTypes.KERBEROS.getAuthName())) {
        try {
-        int authMaxRetries = conf.getIntVar(ConfVars.HIVE_SERVER2_THRIFT_AUTH_MAX_RETRIES);
-        transportFactory = saslServer.createTransportFactory(getSaslProperties(), authMaxRetries);
+        transportFactory = saslServer.createTransportFactory(getSaslProperties());
        } catch (TTransportException e) {
          throw new LoginException(e.getMessage());
        }

http://git-wip-us.apache.org/repos/asf/hive/blob/ab1f405d/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
index 731555c..6fe5969 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge.java
@@ -26,7 +26,6 @@ import java.security.PrivilegedAction;
  import java.security.PrivilegedExceptionAction;
  import java.util.Locale;
  import java.util.Map;
-import java.util.Random;

  import javax.security.auth.callback.Callback;
  import javax.security.auth.callback.CallbackHandler;
@@ -45,6 +44,7 @@ import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.shims.ShimLoader;
  import org.apache.hadoop.hive.shims.Utils;
  import org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport;
  import org.apache.hadoop.security.SaslRpcServer;
@@ -367,7 +367,7 @@ public abstract class HadoopThriftAuthBridge {
       * @param saslProps Map of SASL properties
       */

-    public TTransportFactory createTransportFactory(Map<String, String> saslProps, int authMaxRetries)
+    public TTransportFactory createTransportFactory(Map<String, String> saslProps)
          throws TTransportException {
        // Parse out the kerberos principal, host, realm.
        String kerberosName = realUgi.getUserName();
@@ -386,7 +386,7 @@ public abstract class HadoopThriftAuthBridge {
            null, SaslRpcServer.SASL_DEFAULT_REALM,
            saslProps, new SaslDigestCallbackHandler(secretManager));

-      return new TUGIAssumingTransportFactory(transFactory, realUgi, authMaxRetries);
+      return new TUGIAssumingTransportFactory(transFactory, realUgi);
      }

      /**
@@ -721,14 +721,12 @@ public abstract class HadoopThriftAuthBridge {
      static class TUGIAssumingTransportFactory extends TTransportFactory {
        private final UserGroupInformation ugi;
        private final TTransportFactory wrapped;
-      private final int authMaxRetries;

-      public TUGIAssumingTransportFactory(TTransportFactory wrapped, UserGroupInformation ugi, int authMaxRetries) {
+      public TUGIAssumingTransportFactory(TTransportFactory wrapped, UserGroupInformation ugi) {
          assert wrapped != null;
          assert ugi != null;
          this.wrapped = wrapped;
          this.ugi = ugi;
-        this.authMaxRetries = authMaxRetries;
        }


@@ -737,28 +735,7 @@ public abstract class HadoopThriftAuthBridge {
          return ugi.doAs(new PrivilegedAction<TTransport>() {
            @Override
            public TTransport run() {
-            // Retry the authentication after sleeping for random microseconds
-            short numRetries = 0;
-            Random rand = new Random();
-
-            while (true) {
-              try {
-                return wrapped.getTransport(trans);
-              } catch(RuntimeException e) {
-                if (e.getCause() instanceof TTransportException) {
-                  if (++numRetries < authMaxRetries) {
-                    LOG.warn(e.getMessage());
-                    try {
-                      Thread.sleep(rand.nextInt(1000));
-                    } catch (InterruptedException ie) {
-                    }
-                    continue;
-                  }
-                }
-
-                throw e;
-              }
-            }
+            return wrapped.getTransport(trans);
            }
          });
        }
