Author: navis
Date: Thu Feb 6 02:47:54 2014
New Revision: 1565054

URL: http://svn.apache.org/r1565054
Log:
HIVE-4144 : Add "select database()" command to show the current database (Navis reviewed by Prasad Mujumdar)

Added:
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
     hive/trunk/ql/src/test/queries/clientpositive/select_dummy_source.q
     hive/trunk/ql/src/test/queries/clientpositive/udf_current_database.q
     hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out
     hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out
Modified:
     hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
     hive/trunk/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
     hive/trunk/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
     hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out
     hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
     hive/trunk/ql/src/test/results/compiler/errors/invalid_select.q.out

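In short: the patch makes the FROM clause optional in the grammar, plans such queries against an internal dummy source (_dummy_database@_dummy_table), and registers a current_database() UDF on top of it. Below is a minimal sketch of the user-visible behavior over JDBC; the endpoint, credentials, and classpath (hive-jdbc on a HiveServer2 at localhost:10000) are assumptions for illustration, not part of this commit.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class CurrentDatabaseExample {
  public static void main(String[] args) throws Exception {
    // Explicit load in case the driver jar predates JDBC 4 auto-registration.
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    Connection conn = DriverManager.getConnection(
        "jdbc:hive2://localhost:10000/default", "user", "");
    try (Statement stmt = conn.createStatement()) {
      // FROM-less SELECT: planned against the internal _dummy_table source.
      ResultSet rs = stmt.executeQuery("select current_database()");
      while (rs.next()) {
        System.out.println(rs.getString(1)); // prints "default"
      }
    }
    conn.close();
  }
}

The same behavior is exercised from the CLI by select_dummy_source.q and udf_current_database.q below.
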
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Feb 6 02:47:54 2014
@@ -413,6 +413,8 @@ public class HiveConf extends Configurat
      HIVEADDEDJARS("hive.added.jars.path", ""),
      HIVEADDEDARCHIVES("hive.added.archives.path", ""),

+    HIVE_CURRENT_DATABASE("hive.current.database", ""), // internal usage only
+
      // for hive script operator
      HIVES_AUTO_PROGRESS_TIMEOUT("hive.auto.progress.timeout", 0),
      HIVETABLENAME("hive.table.name", ""),

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Feb 6 02:47:54 2014
@@ -555,6 +555,9 @@ public class Driver implements CommandPr
      }
      if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {
+        if (write.isDummy()) {
+          continue;
+        }
          if (write.getType() == Entity.Type.DATABASE) {
            ss.getAuthorizer().authorize(write.getDatabase(),
                null, op.getOutputRequiredPrivileges());
@@ -585,7 +588,7 @@ public class Driver implements CommandPr
        //determine if partition level privileges should be checked for input tables
        Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
        for (ReadEntity read : inputs) {
-        if (read.getType() == Entity.Type.DATABASE) {
+        if (read.isDummy() || read.getType() == Entity.Type.DATABASE) {
            continue;
          }
          Table tbl = read.getTable();
@@ -659,6 +662,9 @@ public class Driver implements CommandPr
        // cache the results for table authorization
        Set<String> tableAuthChecked = new HashSet<String>();
        for (ReadEntity read : inputs) {
+        if (read.isDummy()) {
+          continue;
+        }
          if (read.getType() == Entity.Type.DATABASE) {
            ss.getAuthorizer().authorize(read.getDatabase(), op.getInputRequiredPrivileges(), null);
            continue;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Thu Feb 6 02:47:54 2014
@@ -283,6 +283,8 @@ public final class FunctionRegistry {
      registerUDF("^", UDFOPBitXor.class, true);
      registerUDF("~", UDFOPBitNot.class, true);

+ registerGenericUDF("current_database", UDFCurrentDB.class);
+
      registerGenericUDF("isnull", GenericUDFOPNull.class);
      registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/RowSchema.java Thu Feb 6 02:47:54 2014
@@ -50,9 +50,13 @@ public class RowSchema implements Serial

    @Override
    public String toString() {
-    StringBuilder sb = new StringBuilder('(');
+    StringBuilder sb = new StringBuilder();
+    sb.append('(');
     if (signature != null) {
-      for (ColumnInfo col : signature) {
+      for (ColumnInfo col: signature) {
+        if (sb.length() > 1) {
+          sb.append(',');
+        }
         sb.append(col.toString());
       }
      }

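A note on the RowSchema.toString() fix above: new StringBuilder('(') never appended a parenthesis, because the char widens to int and selects the capacity constructor, so the builder started empty with capacity 40. A self-contained sketch of the pitfall:

public class StringBuilderPitfall {
  public static void main(String[] args) {
    StringBuilder wrong = new StringBuilder('(');  // '(' is char 40: capacity 40, empty content
    StringBuilder right = new StringBuilder();
    right.append('(');                             // actually appends the character
    System.out.println("[" + wrong + "]");         // prints []
    System.out.println("[" + right + "]");         // prints [(]
  }
}
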
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Thu Feb 6 02:47:54 2014
@@ -91,7 +91,7 @@ import org.apache.log4j.varia.NullAppend

  /**
   * ExecDriver is the central class in co-ordinating execution of any map-reduce task.
- * It's main responsabilities are:
+ * It's main responsibilities are:
   *
   * - Converting the plan (MapredWork) into a MR Job (JobConf)
   * - Submitting a MR job to the cluster via JobClient and ExecHelper
@@ -394,7 +394,7 @@ public class ExecDriver extends Task<Map
          HiveConf.setVar(job, HiveConf.ConfVars.METASTOREPWD, "HIVE");
        }
        JobClient jc = new JobClient(job);
-      // make this client wait if job trcker is not behaving well.
+      // make this client wait if job tracker is not behaving well.
        Throttle.checkJobTracker(job, LOG);

        if (mWork.isGatheringStats() || (rWork != null && rWork.isGatheringStats())) {
@@ -505,7 +505,7 @@ public class ExecDriver extends Task<Map

      if (mWork.getSamplingType() == MapWork.SAMPLING_ON_PREV_MR) {
        console.printInfo("Use sampling data created in previous MR");
-      // merges sampling data from previous MR and make paritition keys for total sort
+      // merges sampling data from previous MR and make partition keys for total sort
        for (Path path : inputPaths) {
          FileSystem fs = path.getFileSystem(job);
          for (FileStatus status : fs.globStatus(new Path(path, ".sampling*"))) {
@@ -547,7 +547,7 @@ public class ExecDriver extends Task<Map
    protected void setInputAttributes(Configuration conf) {
      MapWork mWork = work.getMapWork();
      if (mWork.getInputformat() != null) {
-      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
+      HiveConf.setVar(conf, ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
      }
      if (mWork.getIndexIntermediateFile() != null) {
        conf.set("hive.index.compact.file", mWork.getIndexIntermediateFile());
@@ -556,6 +556,18 @@ public class ExecDriver extends Task<Map

      // Intentionally overwrites anything the user may have put here
      conf.setBoolean("hive.input.format.sorted", mWork.isInputFormatSorted());
+
+    if (HiveConf.getVar(conf, ConfVars.HIVE_CURRENT_DATABASE, null) == null) {
+      HiveConf.setVar(conf, ConfVars.HIVE_CURRENT_DATABASE, getCurrentDB());
+    }
+  }
+
+  public static String getCurrentDB() {
+    String currentDB = null;
+    if (SessionState.get() != null) {
+      currentDB = SessionState.get().getCurrentDatabase();
+    }
+    return currentDB == null ? "default" : currentDB;
    }

    public boolean mapStarted() {

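The setInputAttributes() addition above is what makes current_database() work inside map tasks: the session's current database exists only on the client, so it is copied into the job configuration under hive.current.database before submission, where UDFCurrentDB.configure() can read it back. A sketch of the hand-off using a plain Hadoop Configuration; the helper names here are hypothetical, only the property key comes from the patch:

import org.apache.hadoop.conf.Configuration;

public class CurrentDbHandOff {
  // Client side: stash the session's database before job submission.
  static void stash(Configuration conf, String sessionDb) {
    if (conf.get("hive.current.database") == null) {
      conf.set("hive.current.database", sessionDb == null ? "default" : sessionDb);
    }
  }

  // Task side: the UDF reads it back from the job configuration.
  static String read(Configuration conf) {
    return conf.get("hive.current.database", "default");
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    stash(conf, "xxx");
    System.out.println(read(conf)); // xxx
  }
}
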
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Thu Feb 6 02:47:54 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.metastore.
  import org.apache.hadoop.hive.ql.metadata.DummyPartition;
  import org.apache.hadoop.hive.ql.metadata.Partition;
  import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;

  /**
   * This class encapsulates an object that is being read or written to by the
@@ -264,6 +265,16 @@ public class Entity implements Serializa
      return t;
    }

+  public boolean isDummy() {
+    if (typ == Type.DATABASE) {
+      return database.getName().equals(SemanticAnalyzer.DUMMY_DATABASE);
+    }
+    if (typ == Type.TABLE) {
+      return t.isDummyTable();
+    }
+    return false;
+  }
+
    /**
     * toString function.
     */

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NullRowsInputFormat.java Thu Feb 6 02:47:54 2014
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobConfigurable;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * NullRowsInputFormat outputs null rows, maximum 100.
+ */
+public class NullRowsInputFormat implements InputFormat<NullWritable, NullWritable>,
+    JobConfigurable {
+
+  static final int MAX_ROW = 100; // to prevent infinite loop
+  static final Log LOG = LogFactory.getLog(NullRowsRecordReader.class.getName());
+
+  public static class DummyInputSplit implements InputSplit {
+    public DummyInputSplit() {
+    }
+
+    @Override
+    public long getLength() throws IOException {
+      return 1;
+    }
+
+    @Override
+    public String[] getLocations() throws IOException {
+      return new String[0];
+    }
+
+    @Override
+    public void readFields(DataInput arg0) throws IOException {
+    }
+
+    @Override
+    public void write(DataOutput arg0) throws IOException {
+    }
+
+  }
+
+  public static class NullRowsRecordReader implements RecordReader<NullWritable, NullWritable> {
+
+    private int counter;
+
+    public NullRowsRecordReader() {
+    }
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public NullWritable createKey() {
+      return NullWritable.get();
+    }
+
+    @Override
+    public NullWritable createValue() {
+      return NullWritable.get();
+    }
+
+    @Override
+    public long getPos() throws IOException {
+      return counter;
+    }
+
+    @Override
+    public float getProgress() throws IOException {
+      return (float)counter / MAX_ROW;
+    }
+
+    @Override
+    public boolean next(NullWritable arg0, NullWritable arg1) throws IOException {
+      if (counter++ < MAX_ROW) {
+        return true;
+      }
+      return false;
+    }
+  }
+
+  @Override
+  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0,
+      JobConf arg1, Reporter arg2) throws IOException {
+    return new NullRowsRecordReader();
+  }
+
+  @Override
+  public InputSplit[] getSplits(JobConf arg0, int arg1) throws IOException {
+    InputSplit[] ret = new InputSplit[1];
+    ret[0] = new DummyInputSplit();
+    LOG.info("Calculating splits");
+    return ret;
+  }
+
+  @Override
+  public void configure(JobConf job) {
+    LOG.info("Using null rows input format");
+  }
+
+}

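NullRowsInputFormat hands back a single dummy split whose reader yields up to MAX_ROW (100) null key/value pairs; the dummy table scan is additionally capped via setRowLimit(1) in SemanticAnalyzer below. A sketch of how a caller would drive the reader; in Hive the MR framework performs this iteration, not user code:

import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

public class NullRowsDemo {
  public static void main(String[] args) throws Exception {
    NullRowsInputFormat fmt = new NullRowsInputFormat();
    JobConf conf = new JobConf();
    InputSplit[] splits = fmt.getSplits(conf, 1);  // always one DummyInputSplit
    RecordReader<NullWritable, NullWritable> reader =
        fmt.getRecordReader(splits[0], conf, Reporter.NULL);
    int rows = 0;
    while (reader.next(reader.createKey(), reader.createValue())) {
      rows++;  // rows of "nothing": both key and value are NullWritable
    }
    System.out.println(rows);  // 100 (MAX_ROW)
    reader.close();
  }
}
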
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/OneNullRowInputFormat.java Thu Feb 6 02:47:54 2014
@@ -18,19 +18,11 @@

  package org.apache.hadoop.hive.ql.io;

-import java.io.DataInput;
-import java.io.DataOutput;
  import java.io.IOException;
-import java.util.List;

-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
  import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapred.InputFormat;
  import org.apache.hadoop.mapred.InputSplit;
  import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobConfigurable;
  import org.apache.hadoop.mapred.RecordReader;
  import org.apache.hadoop.mapred.Reporter;

@@ -39,95 +31,34 @@ import org.apache.hadoop.mapred.Reporter
   * metadata only queries.
   *
   */
-public class OneNullRowInputFormat implements
-    InputFormat<NullWritable, NullWritable>, JobConfigurable {
-  private static final Log LOG = LogFactory.getLog(OneNullRowInputFormat.class
-      .getName());
-  MapredWork mrwork = null;
-  List<String> partitions;
-  long len;
-
-  static public class DummyInputSplit implements InputSplit {
-    public DummyInputSplit() {
-    }
-
-    @Override
-    public long getLength() throws IOException {
-      return 1;
-    }
-
-    @Override
-    public String[] getLocations() throws IOException {
-      return new String[0];
-    }
-
-    @Override
-    public void readFields(DataInput arg0) throws IOException {
-    }
-
-    @Override
-    public void write(DataOutput arg0) throws IOException {
-    }
+public class OneNullRowInputFormat extends NullRowsInputFormat {
 
+  @Override
+  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0,
+      JobConf arg1, Reporter arg2) throws IOException {
+    return new OneNullRowRecordReader();
   }
 
-  static public class OneNullRowRecordReader implements RecordReader<NullWritable, NullWritable> {
-    private boolean processed = false;
-    public OneNullRowRecordReader() {
-    }
-    @Override
-    public void close() throws IOException {
-    }
-
-    @Override
-    public NullWritable createKey() {
-      return NullWritable.get();
-    }
-
-    @Override
-    public NullWritable createValue() {
-      return NullWritable.get();
-    }
+  public static class OneNullRowRecordReader extends NullRowsRecordReader {
+    private boolean processed;
 
     @Override
     public long getPos() throws IOException {
-      return (processed ? 1 : 0);
+      return processed ? 1 : 0;
     }
 
     @Override
     public float getProgress() throws IOException {
-      return (float) (processed ? 1.0 : 0.0);
+      return processed ? 1.0f : 0f;
     }
 
     @Override
-    public boolean next(NullWritable arg0, NullWritable arg1) throws IOException {
-      if(processed) {
+    public boolean next(NullWritable key, NullWritable value) throws IOException {
+      if (processed) {
         return false;
-      } else {
-        processed = true;
-        return true;
       }
+      processed = true;
+      return true;
     }
-
-  }
-
-  @Override
-  public RecordReader<NullWritable, NullWritable> getRecordReader(InputSplit arg0, JobConf arg1, Reporter arg2)
-      throws IOException {
-    return new OneNullRowRecordReader();
-  }
-
-  @Override
-  public InputSplit[] getSplits(JobConf arg0, int arg1) throws IOException {
-    InputSplit[] ret = new InputSplit[1];
-    ret[0] = new DummyInputSplit();
-    LOG.info("Calculating splits");
-    return ret;
   }
-
-  @Override
-  public void configure(JobConf job) {
-    LOG.info("Using one null row input format");
-  }
-
-}
\ No newline at end of file
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Thu Feb 6 02:47:54 2014
@@ -23,13 +23,11 @@ import java.io.Serializable;
  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.HashMap;
-import java.util.HashSet;
  import java.util.Iterator;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;
  import java.util.Properties;
-import java.util.Set;

  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
@@ -47,11 +45,11 @@ import org.apache.hadoop.hive.metastore.
  import org.apache.hadoop.hive.metastore.api.SerDeInfo;
  import org.apache.hadoop.hive.metastore.api.SkewedInfo;
  import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.ql.ErrorMsg;
  import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
  import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
  import org.apache.hadoop.hive.ql.io.HivePassThroughOutputFormat;
  import org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
  import org.apache.hadoop.hive.ql.parse.SemanticException;
  import org.apache.hadoop.hive.serde.serdeConstants;
  import org.apache.hadoop.hive.serde2.Deserializer;
@@ -109,6 +107,10 @@ public class Table implements Serializab
      this(getEmptyTable(databaseName, tableName));
    }

+  public boolean isDummyTable() {
+    return tTable.getTableName().equals(SemanticAnalyzer.DUMMY_TABLE);
+  }
+
    /**
     * This function should only be used in serialization.
     * We should never call this function to modify the fields, because

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Thu Feb 6 02:47:54 2014
@@ -1973,7 +1973,7 @@ regularBody[boolean topLevel]
  singleSelectStatement
     :
     selectClause
-   fromClause
+   fromClause?
     whereClause?
     groupByClause?
     havingClause?
@@ -1982,7 +1982,7 @@ singleSelectStatement
     distributeByClause?
     sortByClause?
     window_clause?
-   limitClause? -> ^(TOK_QUERY fromClause ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
+   limitClause? -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
                       selectClause whereClause? groupByClause? havingClause? orderByClause? clusterByClause?
                       distributeByClause? sortByClause? window_clause? limitClause?))
     ;

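Making fromClause optional (and propagating the '?' into the TOK_QUERY rewrite) is the grammar half of the feature: a FROM-less statement now parses, and the analyzer supplies the dummy source afterwards. A sketch using Hive's ParseDriver to observe the effect, assuming the patched parser is on the classpath:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class FromlessParseDemo {
  public static void main(String[] args) throws Exception {
    ParseDriver pd = new ParseDriver();
    // Before this change, a missing FROM clause was a ParseException.
    ASTNode tree = pd.parse("select 1 + 1");
    System.out.println(tree.dump()); // TOK_QUERY without a TOK_FROM child
  }
}
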
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Thu Feb 6 02:47:54 2014
@@ -18,8 +18,8 @@

  package org.apache.hadoop.hive.ql.parse;

+import java.io.IOException;
  import java.io.Serializable;
-import java.net.URI;
  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.HashMap;
@@ -39,6 +39,8 @@ import org.antlr.runtime.tree.Tree;
  import org.antlr.runtime.tree.TreeWizard;
  import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
  import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hive.common.FileUtils;
  import org.apache.hadoop.hive.common.JavaUtils;
@@ -75,8 +77,10 @@ import org.apache.hadoop.hive.ql.exec.Ut
  import org.apache.hadoop.hive.ql.hooks.ReadEntity;
  import org.apache.hadoop.hive.ql.hooks.WriteEntity;
  import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
  import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
  import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
  import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
  import org.apache.hadoop.hive.ql.lib.Dispatcher;
  import org.apache.hadoop.hive.ql.lib.GraphWalker;
@@ -165,6 +169,7 @@ import org.apache.hadoop.hive.ql.udf.gen
  import org.apache.hadoop.hive.serde.serdeConstants;
  import org.apache.hadoop.hive.serde2.Deserializer;
  import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.NullStructSerDe;
  import org.apache.hadoop.hive.serde2.SerDeException;
  import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
  import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
@@ -178,6 +183,7 @@ import org.apache.hadoop.hive.serde2.typ
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.io.IOUtils;
  import org.apache.hadoop.mapred.InputFormat;

  /**
@@ -187,6 +193,10 @@ import org.apache.hadoop.mapred.InputFor
   */

  public class SemanticAnalyzer extends BaseSemanticAnalyzer {
+
+  public static final String DUMMY_DATABASE = "_dummy_database";
+  public static final String DUMMY_TABLE = "_dummy_table";
+
    private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
    private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
    private HashMap<String, Operator<? extends OperatorDesc>> topOps;
@@ -8615,6 +8625,15 @@ public class SemanticAnalyzer extends Ba
        aliasToOpInfo.put(alias, op);
      }

+    if (aliasToOpInfo.isEmpty()) {
+      qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
+      TableScanOperator op = (TableScanOperator) genTablePlan(DUMMY_TABLE, qb);
+      op.getConf().setRowLimit(1);
+      qb.addAlias(DUMMY_TABLE);
+      qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
+      aliasToOpInfo.put(DUMMY_TABLE, op);
+    }
+
      Operator srcOpInfo = null;
      Operator lastPTFOp = null;

@@ -8696,6 +8715,37 @@ public class SemanticAnalyzer extends Ba
      return bodyOpInfo;
    }

+  private Table getDummyTable() throws SemanticException {
+    Path dummyPath = createDummyFile();
+    Table desc = new Table(DUMMY_DATABASE, DUMMY_TABLE);
+    desc.getTTable().getSd().setLocation(dummyPath.toString());
+    desc.getTTable().getSd().getSerdeInfo().setSerializationLib(NullStructSerDe.class.getName());
+    desc.setInputFormatClass(NullRowsInputFormat.class);
+    desc.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
+    return desc;
+  }
+
+  // add dummy data for not removed by CombineHiveInputFormat, etc.
+  private Path createDummyFile() throws SemanticException {
+    Path dummyPath = new Path(ctx.getMRScratchDir(), "dummy_path");
+    Path dummyFile = new Path(dummyPath, "dummy_file");
+    FSDataOutputStream fout = null;
+    try {
+      FileSystem fs = dummyFile.getFileSystem(conf);
+      if (fs.exists(dummyFile)) {
+        return dummyPath;
+      }
+      fout = fs.create(dummyFile);
+      fout.write(1);
+      fout.close();
+    } catch (IOException e) {
+      throw new SemanticException(e);
+    } finally {
+      IOUtils.closeStream(fout);
+    }
+    return dummyPath;
+  }
+
+
    /**
     * Generates the operator DAG needed to implement lateral views and attaches
     * it to the TS operator.

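When no real source is named, the analyzer injects _dummy_table backed by a one-byte scratch file: an empty or missing path could be dropped by CombineHiveInputFormat and friends, which is why createDummyFile() materializes real content (the patch comment hints at this). The scratch-file trick in isolation, against the local filesystem; the paths here are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DummyFileDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Path dummyPath = new Path("file:///tmp/hive_scratch/dummy_path");
    Path dummyFile = new Path(dummyPath, "dummy_file");
    FileSystem fs = dummyFile.getFileSystem(conf);
    if (!fs.exists(dummyFile)) {
      FSDataOutputStream out = fs.create(dummyFile);  // creates parent dirs
      out.write(1);  // any non-empty content keeps the split from being pruned
      out.close();
    }
    System.out.println(fs.getFileStatus(dummyFile).getLen()); // 1
  }
}
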
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java Thu Feb 6 02:47:54 2014
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.MapredContext;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.io.Text;
+
+// deterministic in the query range
+@Description(name = "current_database",
+    value = "_FUNC_() - returns currently using database name")
+public class UDFCurrentDB extends GenericUDF {
+
+  private MapredContext context;
+
+  @Override
+  public void configure(MapredContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    String database;
+    if (context != null) {
+      database = context.getJobConf().get("hive.current.database");
+    } else {
+      database = SessionState.get().getCurrentDatabase();
+    }
+    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
+        TypeInfoFactory.stringTypeInfo, new Text(database));
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    throw new IllegalStateException("never");
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "current_database()";
+  }
+}

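UDFCurrentDB never executes per row: initialize() returns a constant writable object inspector that carries the database name itself, so the planner folds the value in at compile time and evaluate() is unreachable (hence the IllegalStateException). On the cluster side, configure() picks up the value that ExecDriver stashed in the JobConf. The same constant-inspector pattern as a stand-alone, hypothetical UDF:

package org.apache.hadoop.hive.ql.udf.generic;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.Text;

public class UDFConstantExample extends GenericUDF {
  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // The inspector carries the value itself, so Hive substitutes it
    // wherever the function appears instead of calling evaluate().
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
        TypeInfoFactory.stringTypeInfo, new Text("constant"));
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    throw new IllegalStateException("never called; value folded at initialize()");
  }

  @Override
  public String getDisplayString(String[] children) {
    return "constant_example()";
  }
}
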
Added: hive/trunk/ql/src/test/queries/clientpositive/select_dummy_source.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/select_dummy_source.q?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/select_dummy_source.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/select_dummy_source.q Thu Feb 6 02:47:54 2014
@@ -0,0 +1,33 @@
+explain
+select 'a', 100;
+select 'a', 100;
+
+--evaluation
+explain
+select 1 + 1;
+select 1 + 1;
+
+-- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'));
+select explode(array('a', 'b'));
+
+set hive.fetch.task.conversion=more;
+
+explain
+select 'a', 100;
+select 'a', 100;
+
+explain
+select 1 + 1;
+select 1 + 1;
+
+explain
+select explode(array('a', 'b'));
+select explode(array('a', 'b'));
+
+-- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X;
+select 2 + 3,x from (select 1 + 2 x) X;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/udf_current_database.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udf_current_database.q?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udf_current_database.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/udf_current_database.q Thu Feb 6 02:47:54 2014
@@ -0,0 +1,26 @@
+DESCRIBE FUNCTION current_database;
+
+explain
+select current_database();
+select current_database();
+
+create database xxx;
+use xxx;
+
+explain
+select current_database();
+select current_database();
+
+set hive.fetch.task.conversion=more;
+
+use default;
+
+explain
+select current_database();
+select current_database();
+
+use xxx;
+
+explain
+select current_database();
+select current_database();

Modified: hive/trunk/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/ptf_negative_DistributeByOrderBy.q.out Thu Feb 6 02:47:54 2014
@@ -27,4 +27,5 @@ POSTHOOK: query: CREATE TABLE part(
  )
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@part
-FAILED: ParseException line 5:46 missing ) at 'order' near 'p_mfgr' in table name
+FAILED: ParseException line 5:46 missing ) at 'order' near 'p_mfgr'
+line 5:61 missing EOF at ')' near 'p_mfgr'

Modified: hive/trunk/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/ptf_negative_PartitionBySortBy.q.out Thu Feb 6 02:47:54 2014
@@ -27,4 +27,5 @@ POSTHOOK: query: CREATE TABLE part(
  )
  POSTHOOK: type: CREATETABLE
  POSTHOOK: Output: default@part
-FAILED: ParseException line 5:45 missing ) at 'sort' near 'p_mfgr' in table name
+FAILED: ParseException line 5:45 missing ) at 'sort' near 'p_mfgr'
+line 5:59 missing EOF at ')' near 'p_mfgr'

Modified: hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/select_udtf_alias.q.out Thu Feb 6 02:47:54 2014
@@ -1 +1 @@
-FAILED: ParseException line 3:49 missing FROM at 'LIMIT' near ')' in table name
+FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 1 aliases but got 2

Added: hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/select_dummy_source.q.out Thu Feb 6 02:47:54 2014
@@ -0,0 +1,295 @@
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: 'a' (type: string), 100 (type: int)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a 100
+PREHOOK: query: --evaluation
+explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: --evaluation
+explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: (1 + 1) (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2
+PREHOOK: query: -- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: -- explode (not possible for lateral view)
+explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: array('a','b') (type: array<string>)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ UDTF Operator
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ function name: explode
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: explain
+select 'a', 100
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 'a', 100
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: 'a' (type: string), 100 (type: int)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select 'a', 100
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 'a', 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a 100
+PREHOOK: query: explain
+select 1 + 1
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select 1 + 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: (1 + 1) (type: int)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select 1 + 1
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 1 + 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2
+PREHOOK: query: explain
+select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: array('a','b') (type: array<string>)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ UDTF Operator
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ function name: explode
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select explode(array('a', 'b'))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select explode(array('a', 'b'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+a
+b
+PREHOOK: query: -- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+POSTHOOK: query: -- subquery
+explain
+select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: (2 + 3) (type: int), (1 + 2) (type: int)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select 2 + 3,x from (select 1 + 2 x) X
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+5 3

Modified: hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out Thu Feb 6 02:47:54 2014
@@ -50,6 +50,7 @@ covar_pop
  covar_samp
  create_union
  cume_dist
+current_database
  date_add
  date_sub
  datediff
@@ -218,6 +219,7 @@ covar_pop
  covar_samp
  create_union
  cume_dist
+current_database
  PREHOOK: query: SHOW FUNCTIONS '.*e$'
  PREHOOK: type: SHOWFUNCTIONS
  POSTHOOK: query: SHOW FUNCTIONS '.*e$'
@@ -225,6 +227,7 @@ POSTHOOK: type: SHOWFUNCTIONS
  assert_true
  case
  coalesce
+current_database
  decode
  e
  encode

Added: hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out?rev=1565054&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_current_database.q.out Thu Feb 6 02:47:54 2014
@@ -0,0 +1,173 @@
+PREHOOK: query: DESCRIBE FUNCTION current_database
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION current_database
+POSTHOOK: type: DESCFUNCTION
+current_database() - returns currently using database name
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: current_database() (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+default
+PREHOOK: query: create database xxx
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database xxx
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: use xxx
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use xxx
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: current_database() (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+xxx
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: current_database() (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+default
+PREHOOK: query: use xxx
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use xxx
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: explain
+select current_database()
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select current_database()
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: _dummy_table
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: current_database() (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: select current_database()
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select current_database()
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+xxx

Modified: hive/trunk/ql/src/test/results/compiler/errors/invalid_select.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/errors/invalid_select.q.out?rev=1565054&r1=1565053&r2=1565054&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/errors/invalid_select.q.out (original)
+++ hive/trunk/ql/src/test/results/compiler/errors/invalid_select.q.out Thu Feb 6 02:47:54 2014
@@ -1,2 +1 @@
-Parse Error: line 3:6 missing FROM at '(' near '(' in subquery source
-line 3:7 cannot recognize input near 'b' ')' 'FROM' in subquery source
\ No newline at end of file
+Parse Error: line 3:6 missing EOF at '(' near 'trim'
\ No newline at end of file
