FAQ
Repository: hive
Updated Branches:
   refs/heads/master 44d1bdb54 -> 88005d246


HIVE-13264: JDBC driver makes 2 Open Session Calls for every open session (Nithin Mahesh reviewed by Vaibhav Gumashta)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/88005d24
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/88005d24
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/88005d24

Branch: refs/heads/master
Commit: 88005d246246d9ae307c8cf4df8f764a09220efc
Parents: 44d1bdb
Author: Vaibhav Gumashta <vgumashta@hortonworks.com>
Authored: Fri Jun 10 11:27:30 2016 -0700
Committer: Vaibhav Gumashta <vgumashta@hortonworks.com>
Committed: Fri Jun 10 11:27:30 2016 -0700

----------------------------------------------------------------------
  .../hive/minikdc/TestJdbcWithMiniKdc.java | 3 +-
  .../org/apache/hive/jdbc/HiveConnection.java | 93 ++++++++------------
  2 files changed, 41 insertions(+), 55 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/88005d24/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
----------------------------------------------------------------------
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
index 71a08fb..daf0f7e 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestJdbcWithMiniKdc.java
@@ -231,7 +231,8 @@ public class TestJdbcWithMiniKdc {
        // Expected error
        e.printStackTrace();
        assertTrue(e.getMessage().contains("Failed to validate proxy privilege"));
- assertTrue(e.getCause().getCause().getMessage().contains("is not allowed to impersonate"));
+ assertTrue(e.getCause().getCause().getCause().getMessage()
+ .contains("is not allowed to impersonate"));
      }
    }


http://git-wip-us.apache.org/repos/asf/hive/blob/88005d24/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
----------------------------------------------------------------------
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index 50dbd82..8ac040e 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -148,16 +148,6 @@ public class HiveConnection implements java.sql.Connection {
        fetchSize = Integer.parseInt(sessConfMap.get(JdbcConnectionParams.FETCH_SIZE));
      }

- if (isEmbeddedMode) {
- EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
- embeddedClient.init(null);
- client = embeddedClient;
- } else {
- // open the client transport
- openTransport();
- // set up the client
- client = new TCLIService.Client(new TBinaryProtocol(transport));
- }
      // add supported protocols
      supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1);
      supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2);
@@ -168,35 +158,34 @@ public class HiveConnection implements java.sql.Connection {
      supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V7);
      supportedProtocols.add(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8);

- // open client session
- openSession();
-
- // Wrap the client with a thread-safe proxy to serialize the RPC calls
- client = newSynchronizedClient(client);
- }
+ if (isEmbeddedMode) {
+ EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
+ embeddedClient.init(null);
+ client = embeddedClient;

- private void openTransport() throws SQLException {
- int maxRetries = 1;
- try {
- String strRetries = sessConfMap.get(JdbcConnectionParams.RETRIES);
- if (StringUtils.isNotBlank(strRetries)) {
- maxRetries = Integer.parseInt(strRetries);
+ // open client session
+ openSession();
+ } else {
+ int maxRetries = 1;
+ try {
+ String strRetries = sessConfMap.get(JdbcConnectionParams.RETRIES);
+ if (StringUtils.isNotBlank(strRetries)) {
+ maxRetries = Integer.parseInt(strRetries);
+ }
+ } catch(NumberFormatException e) { // Ignore the exception
        }
- } catch(NumberFormatException e) { // Ignore the exception
- }

- for (int numRetries = 0;;) {
+ for (int numRetries = 0;;) {
          try {
- assumeSubject =
- JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap
- .get(JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE));
- transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
- if (!transport.isOpen()) {
- transport.open();
- logZkDiscoveryMessage("Connected to " + connParams.getHost() + ":" + connParams.getPort());
- }
+ // open the client transport
+ openTransport();
+ // set up the client
+ client = new TCLIService.Client(new TBinaryProtocol(transport));
+ // open client session
+ openSession();
+
            break;
- } catch (TTransportException e) {
+ } catch (Exception e) {
            LOG.warn("Failed to connect to " + connParams.getHost() + ":" + connParams.getPort());
            String errMsg = null;
            String warnMsg = "Could not open client transport with JDBC Uri: " + jdbcUriString + ": ";
@@ -221,7 +210,22 @@ public class HiveConnection implements java.sql.Connection {
              LOG.warn(warnMsg + e.getMessage() + " Retrying " + numRetries + " of " + maxRetries);
            }
          }
+ }
      }
+
+ // Wrap the client with a thread-safe proxy to serialize the RPC calls
+ client = newSynchronizedClient(client);
+ }
+
+ private void openTransport() throws Exception {
+ assumeSubject =
+ JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(sessConfMap
+ .get(JdbcConnectionParams.AUTH_KERBEROS_AUTH_TYPE));
+ transport = isHttpTransportMode() ? createHttpTransport() : createBinaryTransport();
+ if (!transport.isOpen()) {
+ transport.open();
+ logZkDiscoveryMessage("Connected to " + connParams.getHost() + ":" + connParams.getPort());
+ }
    }

    public String getConnectedUrl() {
@@ -248,26 +252,7 @@ public class HiveConnection implements java.sql.Connection {
      boolean useSsl = isSslConnection();
      // Create an http client from the configs
      httpClient = getHttpClient(useSsl);
- try {
- transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
- // We'll call an open/close here to send a test HTTP message to the server. Any
- // TTransportException caused by trying to connect to a non-available peer are thrown here.
- // Bubbling them up the call hierarchy so that a retry can happen in openTransport,
- // if dynamic service discovery is configured.
- TCLIService.Iface client = new TCLIService.Client(new TBinaryProtocol(transport));
- TOpenSessionResp openResp = client.OpenSession(new TOpenSessionReq());
- if (openResp != null) {
- client.CloseSession(new TCloseSessionReq(openResp.getSessionHandle()));
- }
- }
- catch (TException e) {
- LOG.info("JDBC Connection Parameters used : useSSL = " + useSsl + " , httpPath = " +
- sessConfMap.get(JdbcConnectionParams.HTTP_PATH) + " Authentication type = " +
- sessConfMap.get(JdbcConnectionParams.AUTH_TYPE));
- String msg = "Could not create http connection to " +
- jdbcUriString + ". " + e.getMessage();
- throw new TTransportException(msg, e);
- }
+ transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
      return transport;
    }

Search Discussions

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 1 of 1 | next ›
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Jun 10, '16 at 6:28p
active: Jun 10, '16 at 6:28p
posts: 1
users: 1
website: hive.apache.org

1 user in discussion

Vgumashta: 1 post

People

Translate

site design / logo © 2021 Grokbase