Author: daijy
Date: Sat Jun 28 01:07:11 2014
New Revision: 1606277

URL: http://svn.apache.org/r1606277
Log:
HIVE-7301 : Restore constants moved to HiveConf by HIVE-7211 (Navis review by Daniel Dai)

Modified:
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java?rev=1606277&r1=1606276&r2=1606277&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java Sat Jun 28 01:07:11 2014
@@ -59,6 +59,8 @@ import org.apache.hadoop.io.compress.Dec
  import org.apache.hadoop.util.Progressable;
  import org.apache.hadoop.util.ReflectionUtils;

+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*;
+
  /**
   * <code>RCFile</code>s, short of Record Columnar File, are flat files
   * consisting of binary key/value pairs, which shares much similarity with
@@ -340,8 +342,15 @@ public class RCFile {

    private static final Log LOG = LogFactory.getLog(RCFile.class);

+ // internal variable
    public static final String COLUMN_NUMBER_METADATA_STR = "hive.io.rcfile.column.number";

+ public static final String RECORD_INTERVAL_CONF_STR = HIVE_RCFILE_RECORD_INTERVAL.varname;
+
+ public static final String COLUMN_NUMBER_CONF_STR = HIVE_RCFILE_COLUMN_NUMBER_CONF.varname;
+
+ public static final String TOLERATE_CORRUPTIONS_CONF_STR = HIVE_RCFILE_TOLERATE_CORRUPTIONS.varname;
+
    // HACK: We actually need BlockMissingException, but that is not available
    // in all hadoop versions.
    public static final String BLOCK_MISSING_MESSAGE =
@@ -978,8 +987,8 @@ public class RCFile {
      public Writer(FileSystem fs, Configuration conf, Path name, int bufferSize,
          short replication, long blockSize, Progressable progress,
          Metadata metadata, CompressionCodec codec) throws IOException {
- RECORD_INTERVAL = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_RCFILE_RECORD_INTERVAL);
- columnNumber = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_RCFILE_COLUMN_NUMBER_CONF);
+ RECORD_INTERVAL = HiveConf.getIntVar(conf, HIVE_RCFILE_RECORD_INTERVAL);
+ columnNumber = HiveConf.getIntVar(conf, HIVE_RCFILE_COLUMN_NUMBER_CONF);

        if (metadata == null) {
          metadata = new Metadata();
@@ -1051,8 +1060,7 @@ public class RCFile {
        this.out = out;
        this.codec = codec;
        this.metadata = metadata;
- this.useNewMagic =
- conf.getBoolean(HiveConf.ConfVars.HIVEUSEEXPLICITRCFILEHEADER.varname, true);
+ this.useNewMagic = conf.getBoolean(HIVEUSEEXPLICITRCFILEHEADER.varname, true);
      }

      /** Returns the compression codec of data in this file. */
@@ -1339,8 +1347,7 @@ public class RCFile {
      /** Create a new RCFile reader. */
      public Reader(FileSystem fs, Path file, int bufferSize, Configuration conf,
          long start, long length) throws IOException {
- tolerateCorruptions = HiveConf.getBoolVar(
- conf, HiveConf.ConfVars.HIVE_RCFILE_TOLERATE_CORRUPTIONS);
+ tolerateCorruptions = HiveConf.getBoolVar(conf, HIVE_RCFILE_TOLERATE_CORRUPTIONS);
        conf.setInt("io.file.buffer.size", bufferSize);
        this.file = file;
        in = openFile(fs, file, bufferSize, length);
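
For downstream code, the point of restoring these constants is that callers which configure RCFile through the old public names keep compiling and resolve to the same HiveConf-backed keys. The snippet below is a minimal, illustrative sketch of such a caller (the class name and the values set are made up for the example); the constants and the Hadoop Configuration methods are the ones that appear in the patch above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.RCFile;

public class RCFileConfCompatExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // The restored constants are aliases for the HiveConf.ConfVars names,
    // so setting them here feeds the HiveConf.getIntVar/getBoolVar calls in
    // RCFile.Writer and RCFile.Reader shown in the diff.
    conf.setInt(RCFile.RECORD_INTERVAL_CONF_STR, 1000);           // rows buffered per row group (example value)
    conf.setInt(RCFile.COLUMN_NUMBER_CONF_STR, 8);                // column count for the writer (example value)
    conf.setBoolean(RCFile.TOLERATE_CORRUPTIONS_CONF_STR, true);  // let the reader skip corrupted blocks

    // A Writer or Reader built with this conf reads the values through HiveConf,
    // exactly as the constructors in the patch do.
    System.out.println(conf.get(RCFile.RECORD_INTERVAL_CONF_STR));
  }
}

Whether a caller uses the RCFile string constants or HiveConf.ConfVars directly, both paths now name the same configuration keys, which is the compatibility HIVE-7301 restores.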
