Repository: hive
Updated Branches:
   refs/heads/master 145e253df -> fb230f9df


http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
index 56a9c18..eb3ab21 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
@@ -280,11 +280,11 @@ public class OperationManager extends AbstractService {
      return getOperation(opHandle).getNextRowSet(orientation, maxRows);
    }

-  public RowSet getOperationLogRowSet(OperationHandle opHandle,
-      FetchOrientation orientation, long maxRows, HiveConf hConf)
-      throws HiveSQLException {
+  public RowSet getOperationLogRowSet(OperationHandle opHandle, FetchOrientation orientation,
+      long maxRows, HiveConf hConf) throws HiveSQLException {
      TableSchema tableSchema = new TableSchema(getLogSchema());
-    RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion());
+    RowSet rowSet =
+        RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion(), false);

      if (hConf.getBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED) == false) {
        LOG.warn("Try to get operation log when hive.server2.logging.operation.enabled is false, no log will be returned. ");
@@ -296,7 +296,6 @@ public class OperationManager extends AbstractService {
        throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle);
      }

-
      // read logs
      List<String> logs;
      try {
@@ -305,10 +304,9 @@ public class OperationManager extends AbstractService {
        throw new HiveSQLException(e.getMessage(), e.getCause());
      }

-
      // convert logs to RowSet
      for (String log : logs) {
-      rowSet.addRow(new String[] {log});
+      rowSet.addRow(new String[] { log });
      }

      return rowSet;
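
The third boolean argument is new: it tells the factory whether rows will arrive as
pre-serialized thrift blobs. The operation-log path always passes false, since log
lines are plain strings. A minimal sketch of the assumed dispatch inside
RowSetFactory.create (only the new three-argument signature is visible in this diff;
the blob flag on ColumnBasedSet is an assumption):

  public static RowSet create(TableSchema schema, TProtocolVersion version, boolean isBlobBased) {
    // V6+ clients understand the columnar encoding; older clients get row-based sets.
    if (version.getValue() >= TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6.getValue()) {
      // Assumed: the flag makes ColumnBasedSet treat each added "row" as one
      // thrift-serialized blob rather than a tuple of column values.
      return new ColumnBasedSet(schema, isBlobBased);
    }
    return new RowBasedSet(schema);
  }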

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index ce06c1c..b921e6e 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException;
  import org.apache.hadoop.hive.ql.Driver;
  import org.apache.hadoop.hive.ql.QueryDisplay;
  import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
  import org.apache.hadoop.hive.ql.exec.Task;
  import org.apache.hadoop.hive.ql.metadata.Hive;
  import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -52,6 +53,7 @@ import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
  import org.apache.hadoop.hive.serde2.objectinspector.StructField;
  import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.thrift.ThriftJDBCBinarySerDe;
  import org.apache.hadoop.hive.shims.Utils;
  import org.apache.hadoop.io.BytesWritable;
  import org.apache.hadoop.security.UserGroupInformation;
@@ -356,12 +358,26 @@ public class SQLOperation extends ExecuteStatementOperation {
    private transient final List<Object> convey = new ArrayList<Object>();

    @Override
-  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
+  public RowSet getNextRowSet(FetchOrientation orientation, long maxRows)
+      throws HiveSQLException {
+
+    HiveConf hiveConf = getConfigForOperation();
      validateDefaultFetchOrientation(orientation);
      assertState(new ArrayList<OperationState>(Arrays.asList(OperationState.FINISHED)));

-    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
+    FetchTask fetchTask = driver.getFetchTask();
+    boolean isBlobBased = false;

+    if (fetchTask != null && fetchTask.getWork().isHiveServerQuery() && HiveConf.getBoolVar(hiveConf,
+        HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS)
+        && (fetchTask.getTblDesc().getSerdeClassName().equalsIgnoreCase(ThriftJDBCBinarySerDe.class
+            .getName()))) {
+      // Just fetch one blob if we've serialized thrift objects in final tasks
+      maxRows = 1;
+      isBlobBased = true;
+    }
+    driver.setMaxRows((int) maxRows);
+    RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion(), isBlobBased);
      try {
        /* if client is requesting fetch-from-start and its not the first time reading from this operation
         * then reset the fetch position to beginning

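The guard only fires when the plan's FetchTask wrote its output through
ThriftJDBCBinarySerDe, in which case every fetched "row" is already a
thrift-serialized batch of rows, so a single blob per fetch call suffices (hence
maxRows = 1). The whole path is gated by one config flag; a hedged usage sketch,
using only the ConfVars constant visible above:

  HiveConf conf = new HiveConf();
  // Have final tasks serialize result sets into thrift blobs themselves, so that
  // HiveServer2 can relay them to JDBC/ODBC clients without re-encoding each row.
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_SERIALIZE_IN_TASKS, true);
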
http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 0f36cd6..ce50967 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
  import org.apache.hadoop.hive.metastore.IMetaStoreClient;
  import org.apache.hadoop.hive.metastore.api.MetaException;
  import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.FetchFormatter;
  import org.apache.hadoop.hive.ql.exec.ListSinkOperator;
  import org.apache.hadoop.hive.ql.exec.Utilities;
  import org.apache.hadoop.hive.ql.history.HiveHistory;
@@ -47,6 +46,8 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
  import org.apache.hadoop.hive.ql.metadata.HiveException;
  import org.apache.hadoop.hive.ql.processors.SetProcessor;
  import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.thrift.ThriftFormatter;
  import org.apache.hadoop.hive.shims.ShimLoader;
  import org.apache.hive.common.util.HiveVersionInfo;
  import org.apache.hive.service.auth.HiveAuthFactory;
@@ -134,9 +135,8 @@ public class HiveSessionImpl implements HiveSession {
      hiveConf.set(ConfVars.HIVESESSIONID.varname,
          this.sessionHandle.getHandleIdentifier().toString());
      // Use thrift transportable formatter
-    hiveConf.set(ListSinkOperator.OUTPUT_FORMATTER,
-        FetchFormatter.ThriftFormatter.class.getName());
-    hiveConf.setInt(ListSinkOperator.OUTPUT_PROTOCOL, protocol.getValue());
+    hiveConf.set(SerDeUtils.LIST_SINK_OUTPUT_FORMATTER, ThriftFormatter.class.getName());
+    hiveConf.setInt(SerDeUtils.LIST_SINK_OUTPUT_PROTOCOL, protocol.getValue());
    }

    public HiveSessionImpl(TProtocolVersion protocol, String username, String password,

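The session wiring is unchanged in effect: the output-formatter keys and the
ThriftFormatter class move out of ql (FetchFormatter/ListSinkOperator) into the
serde2 module, so the service layer no longer reaches into ql internals for them.
Presumably the relocated constants keep the old property values, along these lines
(values inferred from the pre-patch ListSinkOperator constants, not shown in this diff):

  // In org.apache.hadoop.hive.serde2.SerDeUtils (assumed):
  public static final String LIST_SINK_OUTPUT_FORMATTER = "list.sink.output.formatter";
  public static final String LIST_SINK_OUTPUT_PROTOCOL = "list.sink.output.protocol";
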
http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index e789a38..74263e3 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -424,7 +424,7 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
        return cliService.getDelegationTokenFromMetaStore(userName);
      } catch (UnsupportedOperationException e) {
        // The delegation token is not applicable in the given deployment mode
- // such as HMS is not kerberos secured
+ // such as HMS is not kerberos secured
      }
      return null;
    }

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
----------------------------------------------------------------------
diff --git a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
index 698b13d..ff7e9a4 100644
--- a/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
+++ b/service/src/test/org/apache/hive/service/cli/CLIServiceTest.java
@@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory;
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.ql.plan.api.StageType;
  import org.apache.hadoop.hive.ql.session.SessionState;
-
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.junit.After;
  import org.junit.Before;
  import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/hive/blob/fb230f9d/service/src/test/org/apache/hive/service/cli/TestColumn.java
----------------------------------------------------------------------
diff --git a/service/src/test/org/apache/hive/service/cli/TestColumn.java b/service/src/test/org/apache/hive/service/cli/TestColumn.java
index 87bf848..9980aba 100644
--- a/service/src/test/org/apache/hive/service/cli/TestColumn.java
+++ b/service/src/test/org/apache/hive/service/cli/TestColumn.java
@@ -17,6 +17,8 @@
   */
  package org.apache.hive.service.cli;

+import org.apache.hadoop.hive.serde2.thrift.ColumnBuffer;
+import org.apache.hadoop.hive.serde2.thrift.Type;
  import org.junit.Test;

  import java.util.Arrays;
@@ -57,7 +59,7 @@ public class TestColumn {
        Type type = (Type)entry.getKey();
        List<Object> values = (List)entry.getValue();

-      Column c = new Column(type);
+      ColumnBuffer c = new ColumnBuffer(type);
        for (Object v : values) {
          c.addValue(type, v);
        }
@@ -73,7 +75,7 @@ public class TestColumn {

    @Test
    public void testFloatAndDoubleValues() {
-    Column floatColumn = new Column(Type.FLOAT_TYPE);
+    ColumnBuffer floatColumn = new ColumnBuffer(Type.FLOAT_TYPE);
      floatColumn.addValue(Type.FLOAT_TYPE, 1.1f);
      floatColumn.addValue(Type.FLOAT_TYPE, 2.033f);

@@ -83,7 +85,7 @@ public class TestColumn {
      assertEquals(1.1, floatColumn.get(0));
      assertEquals(2.033, floatColumn.get(1));

-    Column doubleColumn = new Column(Type.DOUBLE_TYPE);
+    ColumnBuffer doubleColumn = new ColumnBuffer(Type.DOUBLE_TYPE);
      doubleColumn.addValue(Type.DOUBLE_TYPE, 1.1);
      doubleColumn.addValue(Type.DOUBLE_TYPE, 2.033);

@@ -95,7 +97,7 @@ public class TestColumn {

    @Test
    public void testBooleanValues() {
-    Column boolColumn = new Column(Type.BOOLEAN_TYPE);
+    ColumnBuffer boolColumn = new ColumnBuffer(Type.BOOLEAN_TYPE);
      boolColumn.addValue(Type.BOOLEAN_TYPE, true);
      boolColumn.addValue(Type.BOOLEAN_TYPE, false);

@@ -107,7 +109,7 @@ public class TestColumn {

    @Test
    public void testStringValues() {
-    Column stringColumn = new Column(Type.STRING_TYPE);
+    ColumnBuffer stringColumn = new ColumnBuffer(Type.STRING_TYPE);
      stringColumn.addValue(Type.STRING_TYPE, "12abc456");
      stringColumn.addValue(Type.STRING_TYPE, "~special$&string");

@@ -119,7 +121,7 @@ public class TestColumn {

    @Test
    public void testBinaryValues() {
-    Column binaryColumn = new Column(Type.BINARY_TYPE);
+    ColumnBuffer binaryColumn = new ColumnBuffer(Type.BINARY_TYPE);
      binaryColumn.addValue(Type.BINARY_TYPE, new byte[]{-1, 0, 3, 4});

      assertEquals(Type.BINARY_TYPE, binaryColumn.getType());
