FAQ
Author: gates
Date: Thu Mar 12 19:41:56 2015
New Revision: 1666280

URL: http://svn.apache.org/r1666280
Log:
HIVE-9885 HBase metastore protobufs need to check hasX() functions for values that are not auto set in serialization [hbase-metastore branch] (Alan Gates)

Added:
     hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java
Modified:
     hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
     hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
     hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
     hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
     hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
     hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
     hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestStatsCache.java

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java Thu Mar 12 19:41:56 2015
@@ -105,8 +105,9 @@ public class TestHBaseImport {
      // Turn off caching, as we want to test actual interaction with HBase
      conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
      conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
- HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
- hbase.setConnection(hconn);
+ HBaseReadWrite.setTestConnection(hconn);
+ /*HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
+ hbase.setConnection(hconn);*/
      store = new HBaseStore();
      store.setConf(conf);
    }

Added: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java?rev=1666280&view=auto
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java (added)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseMetastoreSql.java Thu Mar 12 19:41:56 2015
@@ -0,0 +1,147 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Integration tests with HBase Mini-cluster for HBaseStore
+ */
+public class TestHBaseMetastoreSql {
+
+ private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
+
+ private static HBaseTestingUtility utility;
+ private static HTableInterface tblTable;
+ private static HTableInterface sdTable;
+ private static HTableInterface partTable;
+ private static HTableInterface dbTable;
+ private static HTableInterface roleTable;
+ private static HTableInterface globalPrivsTable;
+ private static HTableInterface principalRoleMapTable;
+ private static Map<String, String> emptyParameters = new HashMap<String, String>();
+
+ @Rule public ExpectedException thrown = ExpectedException.none();
+ @Mock private HBaseConnection hconn;
+ private HBaseStore store;
+ private HiveConf conf;
+ private Driver driver;
+
+ @BeforeClass
+ public static void startMiniCluster() throws Exception {
+ utility = new HBaseTestingUtility();
+ utility.startMiniCluster();
+ byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
+ tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
+ families);
+ sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
+ HBaseReadWrite.CATALOG_CF);
+ partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
+ families);
+ dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
+ HBaseReadWrite.CATALOG_CF);
+ roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+ HBaseReadWrite.CATALOG_CF);
+ globalPrivsTable =
+ utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
+ HBaseReadWrite.CATALOG_CF);
+ principalRoleMapTable =
+ utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+ HBaseReadWrite.CATALOG_CF);
+ }
+
+ @AfterClass
+ public static void shutdownMiniCluster() throws Exception {
+ utility.shutdownMiniCluster();
+ }
+
+ @Before
+ public void setupConnection() throws IOException {
+ MockitoAnnotations.initMocks(this);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
+ Mockito.when(hconn.getHBaseTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
+ conf = new HiveConf();
+ conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
+ conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
+ conf.setVar(HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
+ "org.apache.hadoop.hive.metastore.hbase.HBaseStore");
+ conf.setBoolVar(HiveConf.ConfVars.METASTORE_FASTPATH, true);
+ conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+ HBaseReadWrite.setTestConnection(hconn);
+
+ SessionState.start(new CliSessionState(conf));
+ driver = new Driver(conf);
+ }
+
+ @Test
+ public void insertIntoTable() throws Exception {
+ driver.run("create table iit (c int)");
+ CommandProcessorResponse rsp = driver.run("insert into table iit values (3)");
+ Assert.assertEquals(0, rsp.getResponseCode());
+ }
+
+ @Test
+ public void insertIntoPartitionTable() throws Exception {
+ driver.run("create table iipt (c int) partitioned by (ds string)");
+ CommandProcessorResponse rsp =
+ driver.run("insert into table iipt partition(ds) values (1, 'today'), (2, 'yesterday')," +
+ "(3, 'tomorrow')");
+ Assert.assertEquals(0, rsp.getResponseCode());
+ }
+
+
+}

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java Thu Mar 12 19:41:56 2015
@@ -138,8 +138,8 @@ public class TestHBaseStoreIntegration {
      // Turn off caching, as we want to test actual interaction with HBase
      conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
      conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
- HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
- hbase.setConnection(hconn);
+ HBaseReadWrite.setTestConnection(hconn);
+ // HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
      store = new HBaseStore();
      store.setConf(conf);
    }

Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java Thu Mar 12 19:41:56 2015
@@ -113,8 +113,8 @@ public class TestStorageDescriptorSharin
      // Turn off caching, as we want to test actual interaction with HBase
      conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
      conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
- HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
- hbase.setConnection(hconn);
+ HBaseReadWrite.setTestConnection(hconn);
+ // HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
      store = new HBaseStore();
      store.setConf(conf);


Modified: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java (original)
+++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java Thu Mar 12 19:41:56 2015
@@ -85,6 +85,7 @@ class HBaseReadWrite {
    private final static int TABLES_TO_CACHE = 10;

    @VisibleForTesting final static String TEST_CONN = "test_connection";
+ private static HBaseConnection testConn;

    private final static String[] tableNames = { DB_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
        USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE, TABLE_TABLE };
@@ -153,7 +154,11 @@ class HBaseReadWrite {

      try {
        String connClass = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS);
- if (!TEST_CONN.equals(connClass)) {
+ if (TEST_CONN.equals(connClass)) {
+ conn = testConn;
+ LOG.debug("Using test connection.");
+ } else {
+ LOG.debug("Instantiating connection class " + connClass);
          Class c = Class.forName(connClass);
          Object o = c.newInstance();
          if (HBaseConnection.class.isAssignableFrom(o.getClass())) {
@@ -1629,8 +1634,8 @@ class HBaseReadWrite {
     * @param connection Mock connection objecct
     */
    @VisibleForTesting
- void setConnection(HBaseConnection connection) {
- conn = connection;
+ static void setTestConnection(HBaseConnection connection) {
+ testConn = connection;
    }



Modified: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java (original)
+++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java Thu Mar 12 19:41:56 2015
@@ -190,11 +190,13 @@ class HBaseUtils {
      List<PrivilegeGrantInfo> results = new ArrayList<PrivilegeGrantInfo>();
      for (HbaseMetastoreProto.PrivilegeGrantInfo proto : privileges) {
        PrivilegeGrantInfo pgi = new PrivilegeGrantInfo();
- pgi.setPrivilege(proto.getPrivilege());
+ if (proto.hasPrivilege()) pgi.setPrivilege(proto.getPrivilege());
        pgi.setCreateTime((int)proto.getCreateTime());
- pgi.setGrantor(proto.getGrantor());
- pgi.setGrantorType(convertPrincipalTypes(proto.getGrantorType()));
- pgi.setGrantOption(proto.getGrantOption());
+ if (proto.hasGrantor()) pgi.setGrantor(proto.getGrantor());
+ if (proto.hasGrantorType()) {
+ pgi.setGrantorType(convertPrincipalTypes(proto.getGrantorType()));
+ }
+ if (proto.hasGrantOption()) pgi.setGrantOption(proto.getGrantOption());
        results.add(pgi);
      }
      return results;
@@ -272,7 +274,7 @@ class HBaseUtils {
      HbaseMetastoreProto.Role protoRole =
          HbaseMetastoreProto.Role.parseFrom(value);
      role.setCreateTime((int)protoRole.getCreateTime());
- role.setOwnerName(protoRole.getOwnerName());
+ if (protoRole.hasOwnerName()) role.setOwnerName(protoRole.getOwnerName());
      return role;
    }

@@ -344,12 +346,14 @@ class HBaseUtils {
      db.setName(dbName);
      HbaseMetastoreProto.Database protoDb = HbaseMetastoreProto.Database.parseFrom(value);
      db.setName(dbName);
- db.setDescription(protoDb.getDescription());
- db.setLocationUri(protoDb.getUri());
- db.setParameters(buildParameters(protoDb.getParameters()));
- db.setPrivileges(buildPrincipalPrivilegeSet(protoDb.getPrivileges()));
- db.setOwnerName(protoDb.getOwnerName());
- db.setOwnerType(convertPrincipalTypes(protoDb.getOwnerType()));
+ if (protoDb.hasDescription()) db.setDescription(protoDb.getDescription());
+ if (protoDb.hasUri()) db.setLocationUri(protoDb.getUri());
+ if (protoDb.hasParameters()) db.setParameters(buildParameters(protoDb.getParameters()));
+ if (protoDb.hasPrivileges()) {
+ db.setPrivileges(buildPrincipalPrivilegeSet(protoDb.getPrivileges()));
+ }
+ if (protoDb.hasOwnerName()) db.setOwnerName(protoDb.getOwnerName());
+ if (protoDb.hasOwnerType()) db.setOwnerType(convertPrincipalTypes(protoDb.getOwnerType()));

      return db;
    }
@@ -372,7 +376,8 @@ class HBaseUtils {
    convertFieldSchemaListFromProto(List<HbaseMetastoreProto.FieldSchema> protoList) {
      List<FieldSchema> schemas = new ArrayList<FieldSchema>(protoList.size());
      for (HbaseMetastoreProto.FieldSchema proto : protoList) {
- schemas.add(new FieldSchema(proto.getName(), proto.getType(), proto.getComment()));
+ schemas.add(new FieldSchema(proto.getName(), proto.getType(),
+ proto.hasComment() ? proto.getComment() : null));
      }
      return schemas;
    }
@@ -557,35 +562,42 @@ class HBaseUtils {
          HbaseMetastoreProto.StorageDescriptor.parseFrom(serialized);
      StorageDescriptor sd = new StorageDescriptor();
      sd.setCols(convertFieldSchemaListFromProto(proto.getColsList()));
- sd.setInputFormat(proto.getInputFormat());
- sd.setOutputFormat(proto.getOutputFormat());
+ if (proto.hasInputFormat()) sd.setInputFormat(proto.getInputFormat());
+ if (proto.hasOutputFormat()) sd.setOutputFormat(proto.getOutputFormat());
      sd.setCompressed(proto.getIsCompressed());
      sd.setNumBuckets(proto.getNumBuckets());
- SerDeInfo serde = new SerDeInfo();
- serde.setName(proto.getSerdeInfo().getName());
- serde.setSerializationLib(proto.getSerdeInfo().getSerializationLib());
- serde.setParameters(buildParameters(proto.getSerdeInfo().getParameters()));
- sd.setSerdeInfo(serde);
+ if (proto.hasSerdeInfo()) {
+ SerDeInfo serde = new SerDeInfo();
+ serde.setName(proto.getSerdeInfo().getName());
+ serde.setSerializationLib(proto.getSerdeInfo().getSerializationLib());
+ serde.setParameters(buildParameters(proto.getSerdeInfo().getParameters()));
+ sd.setSerdeInfo(serde);
+ }
      sd.setBucketCols(new ArrayList<String>(proto.getBucketColsList()));
      List<Order> sortCols = new ArrayList<Order>();
      for (HbaseMetastoreProto.StorageDescriptor.Order protoOrder : proto.getSortColsList()) {
        sortCols.add(new Order(protoOrder.getColumnName(), protoOrder.getOrder()));
      }
      sd.setSortCols(sortCols);
- SkewedInfo skewed = new SkewedInfo();
- skewed.setSkewedColNames(new ArrayList<String>(proto.getSkewedInfo().getSkewedColNamesList()));
- for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList innerList :
- proto.getSkewedInfo().getSkewedColValuesList()) {
- skewed.addToSkewedColValues(new ArrayList<String>(innerList.getSkewedColValueList()));
- }
- Map<List<String>, String> colMaps = new HashMap<List<String>, String>();
- for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap map :
- proto.getSkewedInfo().getSkewedColValueLocationMapsList()) {
- colMaps.put(new ArrayList<String>(map.getKeyList()), map.getValue());
- }
- skewed.setSkewedColValueLocationMaps(colMaps);
- sd.setSkewedInfo(skewed);
- sd.setStoredAsSubDirectories(proto.getStoredAsSubDirectories());
+ if (proto.hasSkewedInfo()) {
+ SkewedInfo skewed = new SkewedInfo();
+ skewed
+ .setSkewedColNames(new ArrayList<String>(proto.getSkewedInfo().getSkewedColNamesList()));
+ for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueList innerList :
+ proto.getSkewedInfo().getSkewedColValuesList()) {
+ skewed.addToSkewedColValues(new ArrayList<String>(innerList.getSkewedColValueList()));
+ }
+ Map<List<String>, String> colMaps = new HashMap<List<String>, String>();
+ for (HbaseMetastoreProto.StorageDescriptor.SkewedInfo.SkewedColValueLocationMap map :
+ proto.getSkewedInfo().getSkewedColValueLocationMapsList()) {
+ colMaps.put(new ArrayList<String>(map.getKeyList()), map.getValue());
+ }
+ skewed.setSkewedColValueLocationMaps(colMaps);
+ sd.setSkewedInfo(skewed);
+ }
+ if (proto.hasStoredAsSubDirectories()) {
+ sd.setStoredAsSubDirectories(proto.getStoredAsSubDirectories());
+ }
      return sd;
    }

@@ -674,10 +686,10 @@ class HBaseUtils {
      part.setValues(partVals);
      part.setCreateTime((int)proto.getCreateTime());
      part.setLastAccessTime((int)proto.getLastAccessTime());
- sdParts.location = proto.getLocation();
- sdParts.parameters = buildParameters(proto.getSdParameters());
+ if (proto.hasLocation()) sdParts.location = proto.getLocation();
+ if (proto.hasSdParameters()) sdParts.parameters = buildParameters(proto.getSdParameters());
      sdParts.sdHash = proto.getSdHash().toByteArray();
- part.setParameters(buildParameters(proto.getParameters()));
+ if (proto.hasParameters()) part.setParameters(buildParameters(proto.getParameters()));
      return sdParts;
    }

@@ -761,16 +773,18 @@ class HBaseUtils {
      table.setCreateTime((int)proto.getCreateTime());
      table.setLastAccessTime((int)proto.getLastAccessTime());
      table.setRetention((int)proto.getRetention());
- sdParts.location = proto.getLocation();
- sdParts.parameters = buildParameters(proto.getSdParameters());
+ if (proto.hasLocation()) sdParts.location = proto.getLocation();
+ if (proto.hasSdParameters()) sdParts.parameters = buildParameters(proto.getSdParameters());
      sdParts.sdHash = proto.getSdHash().toByteArray();
      table.setPartitionKeys(convertFieldSchemaListFromProto(proto.getPartitionKeysList()));
      table.setParameters(buildParameters(proto.getParameters()));
- table.setViewOriginalText(proto.getViewOriginalText());
- table.setViewExpandedText(proto.getViewExpandedText());
+ if (proto.hasViewOriginalText()) table.setViewOriginalText(proto.getViewOriginalText());
+ if (proto.hasViewExpandedText()) table.setViewExpandedText(proto.getViewExpandedText());
      table.setTableType(proto.getTableType());
- table.setPrivileges(buildPrincipalPrivilegeSet(proto.getPrivileges()));
- table.setTemporary(proto.getIsTemporary());
+ if (proto.hasPrivileges()) {
+ table.setPrivileges(buildPrincipalPrivilegeSet(proto.getPrivileges()));
+ }
+ if (proto.hasIsTemporary()) table.setTemporary(proto.getIsTemporary());
      return sdParts;
    }

@@ -880,15 +894,23 @@ class HBaseUtils {
        colData.setBooleanStats(boolData);
      } else if (proto.hasLongStats()) {
        LongColumnStatsData longData = new LongColumnStatsData();
- longData.setLowValue(proto.getLongStats().getLowValue());
- longData.setHighValue(proto.getLongStats().getHighValue());
+ if (proto.getLongStats().hasLowValue()) {
+ longData.setLowValue(proto.getLongStats().getLowValue());
+ }
+ if (proto.getLongStats().hasHighValue()) {
+ longData.setHighValue(proto.getLongStats().getHighValue());
+ }
        longData.setNumNulls(proto.getNumNulls());
        longData.setNumDVs(proto.getNumDistinctValues());
        colData.setLongStats(longData);
      } else if (proto.hasDoubleStats()) {
        DoubleColumnStatsData doubleData = new DoubleColumnStatsData();
- doubleData.setLowValue(proto.getDoubleStats().getLowValue());
- doubleData.setHighValue(proto.getDoubleStats().getHighValue());
+ if (proto.getDoubleStats().hasLowValue()) {
+ doubleData.setLowValue(proto.getDoubleStats().getLowValue());
+ }
+ if (proto.getDoubleStats().hasHighValue()) {
+ doubleData.setHighValue(proto.getDoubleStats().getHighValue());
+ }
        doubleData.setNumNulls(proto.getNumNulls());
        doubleData.setNumDVs(proto.getNumDistinctValues());
        colData.setDoubleStats(doubleData);
@@ -907,14 +929,18 @@ class HBaseUtils {
        colData.setBinaryStats(binaryData);
      } else if (proto.hasDecimalStats()) {
        DecimalColumnStatsData decimalData = new DecimalColumnStatsData();
- Decimal hiVal = new Decimal();
- hiVal.setUnscaled(proto.getDecimalStats().getHighValue().getUnscaled().toByteArray());
- hiVal.setScale((short) proto.getDecimalStats().getHighValue().getScale());
- decimalData.setHighValue(hiVal);
- Decimal loVal = new Decimal();
- loVal.setUnscaled(proto.getDecimalStats().getLowValue().getUnscaled().toByteArray());
- loVal.setScale((short) proto.getDecimalStats().getLowValue().getScale());
- decimalData.setLowValue(loVal);
+ if (proto.getDecimalStats().hasHighValue()) {
+ Decimal hiVal = new Decimal();
+ hiVal.setUnscaled(proto.getDecimalStats().getHighValue().getUnscaled().toByteArray());
+ hiVal.setScale((short) proto.getDecimalStats().getHighValue().getScale());
+ decimalData.setHighValue(hiVal);
+ }
+ if (proto.getDecimalStats().hasLowValue()) {
+ Decimal loVal = new Decimal();
+ loVal.setUnscaled(proto.getDecimalStats().getLowValue().getUnscaled().toByteArray());
+ loVal.setScale((short) proto.getDecimalStats().getLowValue().getScale());
+ decimalData.setLowValue(loVal);
+ }
        decimalData.setNumNulls(proto.getNumNulls());
        decimalData.setNumDVs(proto.getNumDistinctValues());
        colData.setDecimalStats(decimalData);

Modified: hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java (original)
+++ hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/MockUtils.java Thu Mar 12 19:41:56 2015
@@ -146,8 +146,8 @@ public class MockUtils {
      HBaseConnection hconn = Mockito.mock(HBaseConnection.class);
      Mockito.when(hconn.getHBaseTable(Mockito.anyString())).thenReturn(htable);
      HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
+ HBaseReadWrite.setTestConnection(hconn);
      HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
- hbase.setConnection(hconn);
      HBaseStore store = new HBaseStore();
      store.setConf(conf);
      return store;

Modified: hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestStatsCache.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestStatsCache.java?rev=1666280&r1=1666279&r2=1666280&view=diff
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestStatsCache.java (original)
+++ hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestStatsCache.java Thu Mar 12 19:41:56 2015
@@ -101,8 +101,8 @@ public class TestStatsCache {
      HiveConf conf = new HiveConf();
      conf.setIntVar(HiveConf.ConfVars.METASTORE_HBASE_CACHE_SIZE, 30);
      conf.setVar(HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
+ HBaseReadWrite.setTestConnection(hconn);
      hrw = HBaseReadWrite.getInstance(conf);
- hrw.setConnection(hconn);
      StatsCache.getInstance(conf).clear();
      puts[0] = puts[1] = null;
    }

Search Discussions

Related Discussions

Discussion Navigation
view thread | post
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Mar 12, '15 at 7:42p
active: Mar 12, '15 at 7:42p
posts: 1
users: 1
website: hive.apache.org

1 user in discussion

Gates: 1 post

People

Translate

site design / logo © 2021 Grokbase