FAQ
Author: ehans
Date: Wed Dec 4 19:36:32 2013
New Revision: 1547883

URL: http://svn.apache.org/r1547883
Log:
HIVE-5895: vectorization handles division by zero differently from normal execution (Sergey Shelukhin via Eric Hanson)

Added:
     hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
     hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalar.txt
     hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt
     hive/trunk/ql/src/test/queries/clientpositive/vectorization_div0.q
     hive/trunk/ql/src/test/results/clientpositive/vectorization_div0.q.out
Modified:
     hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
     hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
     hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorScalarColArithmetic.java
     hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out

Modified: hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
URL: http://svn.apache.org/viewvc/hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java (original)
+++ hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java Wed Dec 4 19:36:32 2013
@@ -43,19 +43,16 @@ public class GenVectorCode extends Task
        {"ColumnArithmeticScalar", "Add", "long", "double", "+"},
        {"ColumnArithmeticScalar", "Subtract", "long", "double", "-"},
        {"ColumnArithmeticScalar", "Multiply", "long", "double", "*"},
- {"ColumnArithmeticScalar", "Divide", "long", "double", "/"},
        {"ColumnArithmeticScalar", "Modulo", "long", "double", "%"},

        {"ColumnArithmeticScalar", "Add", "double", "long", "+"},
        {"ColumnArithmeticScalar", "Subtract", "double", "long", "-"},
        {"ColumnArithmeticScalar", "Multiply", "double", "long", "*"},
- {"ColumnArithmeticScalar", "Divide", "double", "long", "/"},
        {"ColumnArithmeticScalar", "Modulo", "double", "long", "%"},

        {"ColumnArithmeticScalar", "Add", "double", "double", "+"},
        {"ColumnArithmeticScalar", "Subtract", "double", "double", "-"},
        {"ColumnArithmeticScalar", "Multiply", "double", "double", "*"},
- {"ColumnArithmeticScalar", "Divide", "double", "double", "/"},
        {"ColumnArithmeticScalar", "Modulo", "double", "double", "%"},

        {"ScalarArithmeticColumn", "Add", "long", "long", "+"},
@@ -66,19 +63,16 @@ public class GenVectorCode extends Task
        {"ScalarArithmeticColumn", "Add", "long", "double", "+"},
        {"ScalarArithmeticColumn", "Subtract", "long", "double", "-"},
        {"ScalarArithmeticColumn", "Multiply", "long", "double", "*"},
- {"ScalarArithmeticColumn", "Divide", "long", "double", "/"},
        {"ScalarArithmeticColumn", "Modulo", "long", "double", "%"},

        {"ScalarArithmeticColumn", "Add", "double", "long", "+"},
        {"ScalarArithmeticColumn", "Subtract", "double", "long", "-"},
        {"ScalarArithmeticColumn", "Multiply", "double", "long", "*"},
- {"ScalarArithmeticColumn", "Divide", "double", "long", "/"},
        {"ScalarArithmeticColumn", "Modulo", "double", "long", "%"},

        {"ScalarArithmeticColumn", "Add", "double", "double", "+"},
        {"ScalarArithmeticColumn", "Subtract", "double", "double", "-"},
        {"ScalarArithmeticColumn", "Multiply", "double", "double", "*"},
- {"ScalarArithmeticColumn", "Divide", "double", "double", "/"},
        {"ScalarArithmeticColumn", "Modulo", "double", "double", "%"},

        {"ColumnArithmeticColumn", "Add", "long", "long", "+"},
@@ -89,21 +83,28 @@ public class GenVectorCode extends Task
        {"ColumnArithmeticColumn", "Add", "long", "double", "+"},
        {"ColumnArithmeticColumn", "Subtract", "long", "double", "-"},
        {"ColumnArithmeticColumn", "Multiply", "long", "double", "*"},
- {"ColumnArithmeticColumn", "Divide", "long", "double", "/"},
        {"ColumnArithmeticColumn", "Modulo", "long", "double", "%"},

        {"ColumnArithmeticColumn", "Add", "double", "long", "+"},
        {"ColumnArithmeticColumn", "Subtract", "double", "long", "-"},
        {"ColumnArithmeticColumn", "Multiply", "double", "long", "*"},
- {"ColumnArithmeticColumn", "Divide", "double", "long", "/"},
        {"ColumnArithmeticColumn", "Modulo", "double", "long", "%"},

        {"ColumnArithmeticColumn", "Add", "double", "double", "+"},
        {"ColumnArithmeticColumn", "Subtract", "double", "double", "-"},
        {"ColumnArithmeticColumn", "Multiply", "double", "double", "*"},
- {"ColumnArithmeticColumn", "Divide", "double", "double", "/"},
        {"ColumnArithmeticColumn", "Modulo", "double", "double", "%"},

+ {"ColumnDivideScalar", "Divide", "long", "double", "/"},
+ {"ColumnDivideScalar", "Divide", "double", "long", "/"},
+ {"ColumnDivideScalar", "Divide", "double", "double", "/"},
+ {"ScalarDivideColumn", "Divide", "long", "double", "/"},
+ {"ScalarDivideColumn", "Divide", "double", "long", "/"},
+ {"ScalarDivideColumn", "Divide", "double", "double", "/"},
+ {"ColumnDivideColumn", "Divide", "long", "double", "/"},
+ {"ColumnDivideColumn", "Divide", "double", "long", "/"},
+ {"ColumnDivideColumn", "Divide", "double", "double", "/"},
+
        {"ColumnCompareScalar", "Equal", "long", "double", "=="},
        {"ColumnCompareScalar", "Equal", "double", "double", "=="},
        {"ColumnCompareScalar", "NotEqual", "long", "double", "!="},
@@ -518,7 +519,7 @@ public class GenVectorCode extends Task
    private void generate() throws Exception {
      System.out.println("Generating vector expression code");
      for (String [] tdesc : templateExpansions) {
- if (tdesc[0].equals("ColumnArithmeticScalar")) {
+ if (tdesc[0].equals("ColumnArithmeticScalar") || tdesc[0].equals("ColumnDivideScalar")) {
          generateColumnArithmeticScalar(tdesc);
        } else if (tdesc[0].equals("ColumnCompareScalar")) {
          generateColumnCompareScalar(tdesc);
@@ -530,13 +531,13 @@ public class GenVectorCode extends Task
          generateFilterScalarCompareColumn(tdesc);
        } else if (tdesc[0].equals("FilterColumnBetween")) {
          generateFilterColumnBetween(tdesc);
- } else if (tdesc[0].equals("ScalarArithmeticColumn")) {
+ } else if (tdesc[0].equals("ScalarArithmeticColumn") || tdesc[0].equals("ScalarDivideColumn")) {
          generateScalarArithmeticColumn(tdesc);
        } else if (tdesc[0].equals("FilterColumnCompareColumn")) {
          generateFilterColumnCompareColumn(tdesc);
        } else if (tdesc[0].equals("ColumnCompareColumn")) {
          generateColumnCompareColumn(tdesc);
- } else if (tdesc[0].equals("ColumnArithmeticColumn")) {
+ } else if (tdesc[0].equals("ColumnArithmeticColumn") || tdesc[0].equals("ColumnDivideColumn")) {
          generateColumnArithmeticColumn(tdesc);
        } else if (tdesc[0].equals("ColumnUnaryMinus")) {
          generateColumnUnaryMinus(tdesc);

Added: hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt?rev=1547883&view=auto
==============================================================================
--- hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt (added)
+++ hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt Wed Dec 4 19:36:32 2013
@@ -0,0 +1,194 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template ColumnDivideColumn.txt (derived from ColumnArithmeticColumn.txt),
+ * which covers binary division expressions between two columns, with
+ * divide-by-zero entries converted to null to match non-vectorized execution.
+ */
+public class <ClassName> extends VectorExpression {
+
+ private static final long serialVersionUID = 1L;
+
+ private int colNum1;
+ private int colNum2;
+ private int outputColumn;
+
+ public <ClassName>(int colNum1, int colNum2, int outputColumn) {
+ this.colNum1 = colNum1;
+ this.colNum2 = colNum2;
+ this.outputColumn = outputColumn;
+ }
+
+ public <ClassName>() {
+ }
+
+ @Override
+ public void evaluate(VectorizedRowBatch batch) {
+
+ if (childExpressions != null) {
+ super.evaluateChildren(batch);
+ }
+
+ <InputColumnVectorType1> inputColVector1 = (<InputColumnVectorType1>) batch.cols[colNum1];
+ <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+ <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+ int[] sel = batch.selected;
+ int n = batch.size;
+ <OperandType1>[] vector1 = inputColVector1.vector;
+ <OperandType2>[] vector2 = inputColVector2.vector;
+ <ReturnType>[] outputVector = outputColVector.vector;
+
+ // return immediately if batch is empty
+ if (n == 0) {
+ return;
+ }
+
+ outputColVector.isRepeating =
+ inputColVector1.isRepeating && inputColVector2.isRepeating
+ || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+ || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+ // Handle nulls first
+ NullUtil.propagateNullsColCol(
+ inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+ /* Disregard nulls for processing. In other words,
+ * the arithmetic operation is performed even if one or
+ * more inputs are null. This is to improve speed by avoiding
+ * conditional checks in the inner loop.
+ */
+ boolean hasDivBy0 = false;
+ if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+ <OperandType2> denom = vector2[0];
+ outputVector[0] = vector1[0] <OperatorSymbol> (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ } else if (inputColVector1.isRepeating) {
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ <OperandType2> denom = vector2[i];
+ outputVector[i] = vector1[0] <OperatorSymbol> (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ <OperandType2> denom = vector2[i];
+ outputVector[i] = vector1[0] <OperatorSymbol> (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ }
+ } else if (inputColVector2.isRepeating) {
+ if (vector2[0] == 0) {
+ // Denominator is zero, convert the batch to nulls
+ outputColVector.noNulls = false;
+ outputColVector.isRepeating = true;
+ outputColVector.isNull[0] = true;
+ } else if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ outputVector[i] = vector1[i] <OperatorSymbol> vector2[0];
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ outputVector[i] = vector1[i] <OperatorSymbol> vector2[0];
+ }
+ }
+ } else {
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ <OperandType2> denom = vector2[i];
+ outputVector[i] = vector1[i] <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ <OperandType2> denom = vector2[i];
+ outputVector[i] = vector1[i] <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ }
+ }
+
+ /* For the case when the output can have null values, follow
+ * the convention that the data values must be 1 for long and
+ * NaN for double. This is to prevent possible later zero-divide errors
+ * in complex arithmetic expressions like col2 <OperatorSymbol> (col1 - 1)
+ * in the case when some col1 entries are null.
+ */
+ if (!hasDivBy0) {
+ NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+ } else {
+ // Currently, the output from division is always double.
+ NullUtil.setNullAndDivBy0DataEntriesDouble(
+ outputColVector, batch.selectedInUse, sel, n, inputColVector2);
+ }
+ }
+
+ @Override
+ public int getOutputColumn() {
+ return outputColumn;
+ }
+
+ @Override
+ public String getOutputType() {
+ return "<ReturnType>";
+ }
+
+ public int getColNum1() {
+ return colNum1;
+ }
+
+ public void setColNum1(int colNum1) {
+ this.colNum1 = colNum1;
+ }
+
+ public int getColNum2() {
+ return colNum2;
+ }
+
+ public void setColNum2(int colNum2) {
+ this.colNum2 = colNum2;
+ }
+
+ public void setOutputColumn(int outputColumn) {
+ this.outputColumn = outputColumn;
+ }
+
+ @Override
+ public VectorExpressionDescriptor.Descriptor getDescriptor() {
+ return (new VectorExpressionDescriptor.Builder())
+ .setMode(
+ VectorExpressionDescriptor.Mode.PROJECTION)
+ .setNumArguments(2)
+ .setArgumentTypes(
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+ .setInputExpressionTypes(
+ VectorExpressionDescriptor.InputExpressionType.COLUMN,
+ VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+ }
+}

Added: hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalar.txt
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalar.txt?rev=1547883&view=auto
==============================================================================
--- hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalar.txt (added)
+++ hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideScalar.txt Wed Dec 4 19:36:32 2013
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.<InputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.<OutputColumnVectorType>;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/**
+ * Generated from template ColumnDivideScalar.txt (derived from ColumnArithmeticScalar.txt),
+ * which covers binary division expressions between a column and a scalar, with
+ * a zero scalar denominator converting the whole batch to nulls.
+ */
+public class <ClassName> extends VectorExpression {
+
+ private static final long serialVersionUID = 1L;
+
+ private int colNum;
+ private <OperandType2> value;
+ private int outputColumn;
+
+ public <ClassName>(int colNum, <OperandType2> value, int outputColumn) {
+ this.colNum = colNum;
+ this.value = value;
+ this.outputColumn = outputColumn;
+ }
+
+ public <ClassName>() {
+ }
+
+ @Override
+ public void evaluate(VectorizedRowBatch batch) {
+
+ if (childExpressions != null) {
+ super.evaluateChildren(batch);
+ }
+
+ <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+ <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+ int[] sel = batch.selected;
+ boolean[] inputIsNull = inputColVector.isNull;
+ boolean[] outputIsNull = outputColVector.isNull;
+ outputColVector.noNulls = inputColVector.noNulls;
+ outputColVector.isRepeating = inputColVector.isRepeating;
+ int n = batch.size;
+ <OperandType1>[] vector = inputColVector.vector;
+ <ReturnType>[] outputVector = outputColVector.vector;
+
+ // return immediately if batch is empty
+ if (n == 0) {
+ return;
+ }
+
+ if (value == 0) {
+ // Denominator is zero, convert the batch to nulls
+ outputColVector.noNulls = false;
+ outputColVector.isRepeating = true;
+ outputIsNull[0] = true;
+ } else if (inputColVector.isRepeating) {
+ outputVector[0] = vector[0] <OperatorSymbol> value;
+
+ // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+ outputIsNull[0] = inputIsNull[0];
+ } else if (inputColVector.noNulls) {
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ outputVector[i] = vector[i] <OperatorSymbol> value;
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ outputVector[i] = vector[i] <OperatorSymbol> value;
+ }
+ }
+ } else /* there are nulls */ {
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ outputVector[i] = vector[i] <OperatorSymbol> value;
+ outputIsNull[i] = inputIsNull[i];
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ outputVector[i] = vector[i] <OperatorSymbol> value;
+ }
+ System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+ }
+ }
+
+ NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+ }
+
+ @Override
+ public int getOutputColumn() {
+ return outputColumn;
+ }
+
+ @Override
+ public String getOutputType() {
+ return "<ReturnType>";
+ }
+
+ public int getColNum() {
+ return colNum;
+ }
+
+ public void setColNum(int colNum) {
+ this.colNum = colNum;
+ }
+
+ public <OperandType2> getValue() {
+ return value;
+ }
+
+ public void setValue(<OperandType2> value) {
+ this.value = value;
+ }
+
+ public void setOutputColumn(int outputColumn) {
+ this.outputColumn = outputColumn;
+ }
+
+ @Override
+ public VectorExpressionDescriptor.Descriptor getDescriptor() {
+ return (new VectorExpressionDescriptor.Builder())
+ .setMode(
+ VectorExpressionDescriptor.Mode.PROJECTION)
+ .setNumArguments(2)
+ .setArgumentTypes(
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+ .setInputExpressionTypes(
+ VectorExpressionDescriptor.InputExpressionType.COLUMN,
+ VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+ }
+}

Added: hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt?rev=1547883&view=auto
==============================================================================
--- hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt (added)
+++ hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt Wed Dec 4 19:36:32 2013
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+
+/*
+ * Because of the templatized nature of the code, either or both
+ * of these ColumnVector imports may be needed. Listing both of them
+ * rather than using ....vectorization.*;
+ */
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
+
+/**
+ * Generated from template ScalarDivideColumn.txt (derived from ScalarArithmeticColumn.txt).
+ * Implements a vectorized division operator with a scalar on the left and a
+ * column vector on the right. The result is output to an output column vector;
+ * rows with a zero denominator are converted to null.
+ */
+public class <ClassName> extends VectorExpression {
+
+ private static final long serialVersionUID = 1L;
+
+ private int colNum;
+ private <OperandType1> value;
+ private int outputColumn;
+
+ public <ClassName>(<OperandType1> value, int colNum, int outputColumn) {
+ this.colNum = colNum;
+ this.value = value;
+ this.outputColumn = outputColumn;
+ }
+
+ public <ClassName>() {
+ }
+
+ @Override
+ /**
+ * Method to evaluate scalar-column operation in vectorized fashion.
+ *
+ * @param batch a package of rows with each column stored in a vector
+ */
+ public void evaluate(VectorizedRowBatch batch) {
+
+ if (childExpressions != null) {
+ super.evaluateChildren(batch);
+ }
+
+ <InputColumnVectorType> inputColVector = (<InputColumnVectorType>) batch.cols[colNum];
+ <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+ int[] sel = batch.selected;
+ boolean[] inputIsNull = inputColVector.isNull;
+ boolean[] outputIsNull = outputColVector.isNull;
+ outputColVector.noNulls = inputColVector.noNulls;
+ outputColVector.isRepeating = inputColVector.isRepeating;
+ int n = batch.size;
+ <OperandType2>[] vector = inputColVector.vector;
+ <ReturnType>[] outputVector = outputColVector.vector;
+
+ // return immediately if batch is empty
+ if (n == 0) {
+ return;
+ }
+
+ boolean hasDivBy0 = false;
+ if (inputColVector.isRepeating) {
+ <OperandType2> denom = vector[0];
+ outputVector[0] = value <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+
+ // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+ outputIsNull[0] = inputIsNull[0];
+ } else if (inputColVector.noNulls) {
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ <OperandType2> denom = vector[i];
+ outputVector[i] = value <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ <OperandType2> denom = vector[i];
+ outputVector[i] = value <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ }
+ } else { /* there are nulls */
+ if (batch.selectedInUse) {
+ for(int j = 0; j != n; j++) {
+ int i = sel[j];
+ <OperandType2> denom = vector[i];
+ outputVector[i] = value <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ outputIsNull[i] = inputIsNull[i];
+ }
+ } else {
+ for(int i = 0; i != n; i++) {
+ <OperandType2> denom = vector[i];
+ outputVector[i] = value <OperatorSymbol> denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
+ }
+ System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+ }
+ }
+
+ if (!hasDivBy0) {
+ NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+ } else {
+ // Currently, the output from division is always double.
+ NullUtil.setNullAndDivBy0DataEntriesDouble(
+ outputColVector, batch.selectedInUse, sel, n, inputColVector);
+ }
+ }
+
+ @Override
+ public int getOutputColumn() {
+ return outputColumn;
+ }
+
+ @Override
+ public String getOutputType() {
+ return "<ReturnType>";
+ }
+
+ public int getColNum() {
+ return colNum;
+ }
+
+ public void setColNum(int colNum) {
+ this.colNum = colNum;
+ }
+
+ public <OperandType1> getValue() {
+ return value;
+ }
+
+ public void setValue(<OperandType1> value) {
+ this.value = value;
+ }
+
+ public void setOutputColumn(int outputColumn) {
+ this.outputColumn = outputColumn;
+ }
+
+ @Override
+ public VectorExpressionDescriptor.Descriptor getDescriptor() {
+ return (new VectorExpressionDescriptor.Builder())
+ .setMode(
+ VectorExpressionDescriptor.Mode.PROJECTION)
+ .setNumArguments(2)
+ .setArgumentTypes(
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+ VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
+ .setInputExpressionTypes(
+ VectorExpressionDescriptor.InputExpressionType.SCALAR,
+ VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+ }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongColumn.java Wed Dec 4 19:36:32 2013
@@ -77,21 +77,32 @@ public class LongColDivideLongColumn ext
       * more inputs are null. This is to improve speed by avoiding
       * conditional checks in the inner loop.
       */
+ boolean hasDivBy0 = false;
      if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
- outputVector[0] = vector1[0] / (double) vector2[0];
+ long denom = vector2[0];
+ outputVector[0] = vector1[0] / (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
      } else if (inputColVector1.isRepeating) {
        if (batch.selectedInUse) {
          for(int j = 0; j != n; j++) {
            int i = sel[j];
- outputVector[i] = vector1[0] / (double) vector2[i];
+ long denom = vector2[i];
+ outputVector[i] = vector1[0] / (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        } else {
          for(int i = 0; i != n; i++) {
- outputVector[i] = vector1[0] / (double) vector2[i];
+ long denom = vector2[i];
+ outputVector[i] = vector1[0] / (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        }
      } else if (inputColVector2.isRepeating) {
- if (batch.selectedInUse) {
+ if (vector2[0] == 0) {
+ outputColVector.noNulls = false;
+ outputColVector.isRepeating = true;
+ outputColVector.isNull[0] = true;
+ } else if (batch.selectedInUse) {
          for(int j = 0; j != n; j++) {
            int i = sel[j];
            outputVector[i] = vector1[i] / (double) vector2[0];
@@ -105,11 +116,15 @@ public class LongColDivideLongColumn ext
        if (batch.selectedInUse) {
          for(int j = 0; j != n; j++) {
            int i = sel[j];
- outputVector[i] = vector1[i] / (double) vector2[i];
+ long denom = vector2[i];
+ outputVector[i] = vector1[i] / (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        } else {
          for(int i = 0; i != n; i++) {
- outputVector[i] = vector1[i] / (double) vector2[i];
+ long denom = vector2[i];
+ outputVector[i] = vector1[i] / (double) denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        }
      }
@@ -120,7 +135,12 @@ public class LongColDivideLongColumn ext
       * in complex arithmetic expressions like col2 / (col1 - 1)
       * in the case when some col1 entries are null.
       */
- NullUtil.setNullDataEntriesDouble(outputColVector, batch.selectedInUse, sel, n);
+ if (!hasDivBy0) {
+ NullUtil.setNullDataEntriesDouble(outputColVector, batch.selectedInUse, sel, n);
+ } else {
+ NullUtil.setNullAndDivBy0DataEntriesDouble(
+ outputColVector, batch.selectedInUse, sel, n, inputColVector2);
+ }
    }

    @Override

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongColDivideLongScalar.java Wed Dec 4 19:36:32 2013
@@ -68,9 +68,13 @@ public class LongColDivideLongScalar ext
        return;
      }

- if (inputColVector.isRepeating) {
+ if (value == 0) {
+ // Denominator is zero, convert the batch to nulls
+ outputColVector.noNulls = false;
+ outputColVector.isRepeating = true;
+ outputIsNull[0] = true;
+ } else if (inputColVector.isRepeating) {
        outputVector[0] = vector[0] / (double) value;
-
        // Even if there are no nulls, we always copy over entry 0. Simplifies code.
        outputIsNull[0] = inputIsNull[0];
      } else if (inputColVector.noNulls) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/LongScalarDivideLongColumn.java Wed Dec 4 19:36:32 2013
@@ -68,8 +68,11 @@ public class LongScalarDivideLongColumn
        return;
      }

+ boolean hasDivBy0 = false;
      if (inputColVector.isRepeating) {
- outputVector[0] = value / vector[0];
+ long denom = vector[0];
+ outputVector[0] = value / denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);

        // Even if there are no nulls, we always copy over entry 0. Simplifies code.
        outputIsNull[0] = inputIsNull[0];
@@ -77,23 +80,31 @@ public class LongScalarDivideLongColumn
        if (batch.selectedInUse) {
          for(int j = 0; j != n; j++) {
            int i = sel[j];
- outputVector[i] = value / vector[i];
+ long denom = vector[i];
+ outputVector[i] = value / denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        } else {
          for(int i = 0; i != n; i++) {
- outputVector[i] = value / vector[i];
+ long denom = vector[i];
+ outputVector[i] = value / denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
        }
      } else /* there are nulls */ {
        if (batch.selectedInUse) {
          for(int j = 0; j != n; j++) {
            int i = sel[j];
- outputVector[i] = value / vector[i];
+ long denom = vector[i];
+ outputVector[i] = value / denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
            outputIsNull[i] = inputIsNull[i];
          }
        } else {
          for(int i = 0; i != n; i++) {
- outputVector[i] = value / vector[i];
+ long denom = vector[i];
+ outputVector[i] = value / denom;
+ hasDivBy0 = hasDivBy0 || (denom == 0);
          }
          System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
        }
@@ -103,7 +114,12 @@ public class LongScalarDivideLongColumn
       * Unlike other col-scalar operations, this one doesn't benefit from carrying
       * over NaN values from the input array.
       */
- NullUtil.setNullDataEntriesDouble(outputColVector, batch.selectedInUse, sel, n);
+ if (!hasDivBy0) {
+ NullUtil.setNullDataEntriesDouble(outputColVector, batch.selectedInUse, sel, n);
+ } else {
+ NullUtil.setNullAndDivBy0DataEntriesDouble(
+ outputColVector, batch.selectedInUse, sel, n, inputColVector);
+ }
    }

    @Override

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java Wed Dec 4 19:36:32 2013
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.ql.exec.ve
   * Utility functions to handle null propagation.
   */
  public class NullUtil {
-
    /*
     * Set the data value for all NULL entries to the designated NULL_VALUE.
     */
@@ -42,7 +41,7 @@ public class NullUtil {
          int i = sel[j];
          if(v.isNull[i]) {
            v.vector[i] = LongColumnVector.NULL_VALUE;
- }
+ }
        }
      } else {
        for (int i = 0; i != n; i++) {
@@ -50,7 +49,7 @@ public class NullUtil {
            v.vector[i] = LongColumnVector.NULL_VALUE;
          }
        }
- }
+ }
    }

    // for use by Column-Scalar and Scalar-Column arithmetic for null propagation
@@ -77,28 +76,84 @@ public class NullUtil {
      } else if (selectedInUse) {
        for (int j = 0; j != n; j++) {
          int i = sel[j];
- if(v.isNull[i]) {
+ if (v.isNull[i]) {
            v.vector[i] = DoubleColumnVector.NULL_VALUE;
- }
+ }
        }
      } else {
        for (int i = 0; i != n; i++) {
- if(v.isNull[i]) {
+ if (v.isNull[i]) {
            v.vector[i] = DoubleColumnVector.NULL_VALUE;
          }
        }
- }
+ }
    }
-
+
+ /*
+ * Set the data value for all NULL entries, as well as those coming from division by zero,
+ * to NaN. Assumes there are entries coming from division by zero.
+ * We assume that infinities do not appear legally in the result, so we can replace all of them.
+ */
+ public static void setNullAndDivBy0DataEntriesDouble(
+ DoubleColumnVector v, boolean selectedInUse, int[] sel, int n, LongColumnVector denoms) {
+ assert v.isRepeating || !denoms.isRepeating;
+ v.noNulls = false;
+ long[] vector = denoms.vector;
+ if (v.isRepeating && (v.isNull[0] = (v.isNull[0] || vector[0] == 0))) {
+ v.vector[0] = DoubleColumnVector.NULL_VALUE;
+ } else if (selectedInUse) {
+ for (int j = 0; j != n; j++) {
+ int i = sel[j];
+ if (v.isNull[i] = (v.isNull[i] || vector[i] == 0)) {
+ v.vector[i] = DoubleColumnVector.NULL_VALUE;
+ }
+ }
+ } else {
+ for (int i = 0; i != n; i++) {
+ if (v.isNull[i] = (v.isNull[i] || vector[i] == 0)) {
+ v.vector[i] = DoubleColumnVector.NULL_VALUE;
+ }
+ }
+ }
+ }
+
+ /*
+ * Set the data value for all NULL entries, as well as those coming from division by zero,
+ * to NaN. Assumes there are entries coming from division by zero.
+ * We assume that infinities do not appear legally in the result, so we can replace all of them.
+ */
+ public static void setNullAndDivBy0DataEntriesDouble(
+ DoubleColumnVector v, boolean selectedInUse, int[] sel, int n, DoubleColumnVector denoms) {
+ assert v.isRepeating || !denoms.isRepeating;
+ v.noNulls = false;
+ double[] vector = denoms.vector;
+ if (v.isRepeating && (v.isNull[0] = (v.isNull[0] || vector[0] == 0))) {
+ v.vector[0] = DoubleColumnVector.NULL_VALUE;
+ } else if (selectedInUse) {
+ for (int j = 0; j != n; j++) {
+ int i = sel[j];
+ if (v.isNull[i] = (v.isNull[i] || vector[i] == 0)) {
+ v.vector[i] = DoubleColumnVector.NULL_VALUE;
+ }
+ }
+ } else {
+ for (int i = 0; i != n; i++) {
+ if (v.isNull[i] = (v.isNull[i] || vector[i] == 0)) {
+ v.vector[i] = DoubleColumnVector.NULL_VALUE;
+ }
+ }
+ }
+ }
+
    /*
     * Propagate null values for a two-input operator.
     */
    public static void propagateNullsColCol(ColumnVector inputColVector1,
- ColumnVector inputColVector2, ColumnVector outputColVector, int[] sel,
+ ColumnVector inputColVector2, ColumnVector outputColVector, int[] sel,
        int n, boolean selectedInUse) {

      outputColVector.noNulls = inputColVector1.noNulls && inputColVector2.noNulls;
-
+
      if (inputColVector1.noNulls && !inputColVector2.noNulls) {
        if (inputColVector2.isRepeating) {
          outputColVector.isNull[0] = inputColVector2.isNull[0];
@@ -144,11 +199,11 @@ public class NullUtil {
                 outputColVector.isNull[i] = inputColVector2.isNull[i];
               }
            } else {
-
+
              // copy nulls from the non-repeating side
              System.arraycopy(inputColVector2.isNull, 0, outputColVector.isNull, 0, n);
            }
- }
+ }
        } else if (!inputColVector1.isRepeating && inputColVector2.isRepeating) {
          if (inputColVector2.isNull[0]) {
            outputColVector.isNull[0] = true;
@@ -161,11 +216,10 @@ public class NullUtil {
                 outputColVector.isNull[i] = inputColVector1.isNull[i];
               }
            } else {
-
              // copy nulls from the non-repeating side
              System.arraycopy(inputColVector1.isNull, 0, outputColVector.isNull, 0, n);
            }
- }
+ }
        } else { // neither side is repeating
          if (selectedInUse) {
            for(int j = 0; j != n; j++) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Wed Dec 4 19:36:32 2013
@@ -636,8 +636,12 @@ public class Vectorizer implements Physi
    }

    private boolean validateExprNodeDescRecursive(ExprNodeDesc desc) {
- boolean ret = validateDataType(desc.getTypeInfo().getTypeName());
+ String typeName = desc.getTypeInfo().getTypeName();
+ boolean ret = validateDataType(typeName);
      if (!ret) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Cannot vectorize " + desc.getExprString() + " of type " + typeName);
+ }
        return false;
      }
      if (desc instanceof ExprNodeGenericFuncDesc) {
@@ -669,6 +673,7 @@ public class Vectorizer implements Physi
      try {
        VectorizationContext vc = new ValidatorVectorizationContext();
        if (vc.getVectorExpression(desc, mode) == null) {
+ // TODO: this cannot happen - VectorizationContext throws in such cases.
          return false;
        }
      } catch (HiveException e) {

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorArithmeticExpressions.java Wed Dec 4 19:36:32 2013
@@ -251,7 +251,6 @@ public class TestVectorArithmeticExpress

    @Test
    public void testLongColDivideLongColumn() {
-
      /* Testing for equality of doubles after a math operation is
       * not always reliable so use this as a tolerance.
       */
@@ -270,6 +269,8 @@ public class TestVectorArithmeticExpress
      expr.evaluate(batch);

      // 0/0 for entry 0 should work but generate NaN
+ assertFalse(out.noNulls);
+ assertTrue(out.isNull[0]);
      assertTrue(Double.isNaN(out.vector[0]));

      // verify NULL output in entry 1 is correct

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorScalarColArithmetic.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorScalarColArithmetic.java?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorScalarColArithmetic.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorScalarColArithmetic.java Wed Dec 4 19:36:32 2013
@@ -179,6 +179,7 @@ public class TestVectorScalarColArithmet
      expr.evaluate(batch);

      // verify NULL output in entry 0 is correct
+ assertFalse(out.noNulls);
      assertTrue(out.isNull[0]);
      assertTrue(Double.isNaN(out.vector[0]));

@@ -186,7 +187,6 @@ public class TestVectorScalarColArithmet
      for (int i = 1; i != batch.size; i++) {
        assertTrue(equalsWithinTolerance((i * 37) / 100d, out.vector[i]));
      }
- assertFalse(out.noNulls);
      assertFalse(out.isRepeating);
    }

@@ -203,7 +203,8 @@ public class TestVectorScalarColArithmet
      expr.evaluate(batch);

      // verify zero-divide result for position 0
- assertTrue(Double.isInfinite(out.vector[0]));
+ assertTrue(out.isNull[0]);
+ assertTrue(Double.isNaN(out.vector[0]));

      // verify NULL output in entry 1 is correct
      assertTrue(out.isNull[1]);

Added: hive/trunk/ql/src/test/queries/clientpositive/vectorization_div0.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/vectorization_div0.q?rev=1547883&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/vectorization_div0.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/vectorization_div0.q Wed Dec 4 19:36:32 2013
@@ -0,0 +1,24 @@
+SET hive.vectorized.execution.enabled = true;
+
+-- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
+explain
+select cdouble / 0.0 from alltypesorc limit 100;
+select cdouble / 0.0 from alltypesorc limit 100;
+
+-- There are no zeros in the table, but there is 988888, so use it as zero
+
+-- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants as numerators
+explain
+select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100;
+select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100;
+
+-- There are no zeros in the table, but there is -200.0, so use it as zero
+
+explain
+select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100;
+select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100;
+

Added: hive/trunk/ql/src/test/results/clientpositive/vectorization_div0.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vectorization_div0.q.out?rev=1547883&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vectorization_div0.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/vectorization_div0.q.out Wed Dec 4 19:36:32 2013
@@ -0,0 +1,532 @@
+PREHOOK: query: -- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
+explain
+select cdouble / 0.0 from alltypesorc limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: -- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants
+explain
+select cdouble / 0.0 from alltypesorc limit 100
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypesorc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (/ (TOK_TABLE_OR_COL cdouble) 0.0))) (TOK_LIMIT 100)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ alltypesorc
+ TableScan
+ alias: alltypesorc
+ Select Operator
+ expressions:
+ expr: (cdouble / 0.0)
+ type: double
+ outputColumnNames: _col0
+ Vectorized execution: true
+ Limit
+ Vectorized execution: true
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ Vectorized execution: true
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 100
+
+
+PREHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select cdouble / 0.0 from alltypesorc limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+NULL
+PREHOOK: query: -- There are no zeros in the table, but there is 988888, so use it as zero
+
+-- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants as numerators
+explain
+select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: -- There are no zeros in the table, but there is 988888, so use it as zero
+
+-- TODO: add more stuff here after HIVE-5918 is fixed, such as cbigint and constants as numerators
+explain
+select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypesorc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (- (TOK_TABLE_OR_COL cbigint) 988888L) s1) (TOK_SELEXPR (/ (TOK_TABLE_OR_COL cdouble) (- (TOK_TABLE_OR_COL cbigint) 988888L)) s2) (TOK_SELEXPR (/ 1.2 (- (TOK_TABLE_OR_COL cbigint) 988888L)))) (TOK_WHERE (and (> (TOK_TABLE_OR_COL cbigint) 0) (< (TOK_TABLE_OR_COL cbigint) 100000000))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL s1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL s2))) (TOK_LIMIT 100)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ alltypesorc
+ TableScan
+ alias: alltypesorc
+ Filter Operator
+ predicate:
+ expr: ((cbigint > 0) and (cbigint < 100000000))
+ type: boolean
+ Vectorized execution: true
+ Select Operator
+ expressions:
+ expr: (cbigint - 988888)
+ type: bigint
+ expr: (cdouble / (cbigint - 988888))
+ type: double
+ expr: (1.2 / (cbigint - 988888))
+ type: double
+ outputColumnNames: _col0, _col1, _col2
+ Vectorized execution: true
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: bigint
+ expr: _col1
+ type: double
+ sort order: ++
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: bigint
+ expr: _col1
+ type: double
+ expr: _col2
+ type: double
+ Vectorized execution: true
+ Reduce Operator Tree:
+ Extract
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 100
+
+
+PREHOOK: query: select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select (cbigint - 988888L) as s1, cdouble / (cbigint - 988888L) as s2, 1.2 / (cbigint - 988888L)
+from alltypesorc where cbigint > 0 and cbigint < 100000000 order by s1, s2 limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-985319 NULL -1.21787969175465E-6
+-985319 2.0297994862577501E-4 -1.21787969175465E-6
+-63925 0.11256941728588189 -1.8771998435666796E-5
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+0 NULL NULL
+392309 NULL 3.05881333336732E-6
+673083 -0.010691103474608629 1.7828410463494101E-6
+2331159 NULL 5.147654021025593E-7
+2342037 NULL 5.123744842630582E-7
+3533105 -5.660743170667161E-5 3.3964459024002967E-7
+3768727 0.004139594085748318 3.184099033970887E-7
+4728619 NULL 2.5377388197272816E-7
+5391403 NULL 2.2257657236901044E-7
+7022666 -0.0010246820794268159 1.708752772807364E-7
+7470430 NULL 1.6063332365071354E-7
+8276429 NULL 1.4499006757624573E-7
+8286860 -8.683626850218298E-4 1.44807562816314E-7
+8299981 -8.669899364829872E-4 1.445786442161735E-7
+9247593 NULL 1.297634962957388E-7
+9821695 -7.326637611939691E-4 1.2217850381222386E-7
+10000738 0.001559984873116364 1.1999114465352456E-7
+10081828 0.0015474376273826532 1.190260337708598E-7
+10745355 -6.696847149303117E-4 1.1167616146697805E-7
+11127199 -1.797397530142132E-5 1.0784385180852791E-7
+11722580 NULL 1.023665438836843E-7
+12649396 NULL 9.486618965838368E-8
+13126214 -1.5236685917203544E-5 9.142011550322126E-8
+14042667 NULL 8.545385288991044E-8
+14943972 -1.3383322720358416E-5 8.02999363221505E-8
+16259022 NULL 7.380517721176587E-8
+16531556 -1.2098074736582569E-5 7.258844841949542E-8
+16596157 NULL 7.230589587697923E-8
+17058489 -1.1724367849930905E-5 7.034620709958544E-8
+17247320 -4.172242412154468E-4 6.957602688417679E-8
+19004427 8.209139901981786E-4 6.314318237534864E-8
+19498517 NULL 6.154314197331007E-8
+20165679 7.736411950224934E-4 5.95070466013071E-8
+20547875 NULL 5.840019953401507E-8
+23264783 NULL 5.158010715165492E-8
+23475527 6.645644206411213E-4 5.111706331448917E-8
+24379905 NULL 4.922086447834805E-8
+24514624 -2.935390728407664E-4 4.895037345871591E-8
+25154198 -2.860755091456305E-4 4.770575472133916E-8
+25245192 -7.922300610745999E-6 4.7533803664475993E-8
+26610943 NULL 4.509423059528556E-8
+27520143 5.668938566198584E-4 4.360442458456702E-8
+27818379 NULL 4.313694913711543E-8
+28400244 NULL 4.225315810666979E-8
+28698999 5.43607810153936E-4 4.18133050563889E-8
+28806400 -6.9429015774272385E-6 4.165740946456343E-8
+29920877 5.214085135271938E-4 4.010577631130264E-8
+33126539 NULL 3.622473207961749E-8
+34603086 NULL 3.467898787986713E-8
+35156265 NULL 3.413331876978399E-8
+35862260 NULL 3.346136021544654E-8
+36123797 -1.992038655294182E-4 3.321909931007529E-8
+36341671 -1.980096072082101E-4 3.301994561559924E-8
+36413215 -5.4925114412446145E-6 3.2955068647467685E-8
+36578596 4.2650625518814335E-4 3.280607052277239E-8
+36796441 -1.955623914823719E-4 3.2611849607955287E-8
+39723587 NULL 3.0208752296211316E-8
+39985709 -1.7996429674411925E-4 3.001072208073139E-8
+40018606 NULL 2.998605198791782E-8
+41003161 NULL 2.9266036342905367E-8
+41158231 3.790493328053871E-4 2.9155772025284565E-8
+41848817 NULL 2.8674645689506587E-8
+44047567 -1.633688416888043E-4 2.724327543448654E-8
+45125678 NULL 2.6592398234991615E-8
+45180154 NULL 2.6560334433565674E-8
+45717793 3.4124569399052136E-4 2.6247986205283355E-8
+46163162 NULL 2.5994753132378583E-8
+46525838 3.353190543284787E-4 2.5792120068852925E-8
+48626663 NULL 2.4677819244968545E-8
+49102701 -1.465499830650864E-4 2.4438574163160596E-8
+50300445 -1.4306036457530346E-4 2.3856647789100076E-8
+50929325 -1.412938420055636E-4 2.356206370298448E-8
+52422534 -1.3726921327381848E-4 2.2890919389741823E-8
+52667422 2.9621727070673783E-4 2.2784483356713376E-8
+52962061 2.945693522010029E-4 2.265772852004381E-8
+53695172 NULL 2.234837798824818E-8
+54760317 NULL 2.1913678841559662E-8
+55020655 2.835480602693661E-4 2.180999117513232E-8
+56102034 NULL 2.1389598815615135E-8
+56131313 NULL 2.13784416551952E-8
+56838351 -3.5187509222426247E-6 2.1112505533455745E-8
+56997841 -3.5089048372902406E-6 2.105342902374144E-8
+57778807 -1.2454393528755274E-4 2.076886080392764E-8
+58080381 NULL 2.0661021490199935E-8
+58307527 NULL 2.058053328174937E-8
+58536385 -1.2293208745295768E-4 2.0500070170031853E-8
+59347745 NULL 2.0219807846111087E-8
+60229567 NULL 1.992376933408802E-8
+60330397 NULL 1.9890470801974003E-8
+PREHOOK: query: -- There are no zeros in the table, but there is -200.0, so use it as zero
+
+explain
+select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
+PREHOOK: type: QUERY
+POSTHOOK: query: -- There are no zeros in the table, but there is -200.0, so use it as zero
+
+explain
+select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME alltypesorc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (+ (TOK_TABLE_OR_COL cdouble) 200.0) s1) (TOK_SELEXPR (/ (TOK_TABLE_OR_COL cbigint) (+ (TOK_TABLE_OR_COL cdouble) 200.0)) s2) (TOK_SELEXPR (/ (+ (TOK_TABLE_OR_COL cdouble) 200.0) (+ (TOK_TABLE_OR_COL cdouble) 200.0))) (TOK_SELEXPR (/ (TOK_TABLE_OR_COL cbigint) (+ (TOK_TABLE_OR_COL cdouble) 200.0))) (TOK_SELEXPR (/ 1 (+ (TOK_TABLE_OR_COL cdouble) 200.0))) (TOK_SELEXPR (/ 1.2 (+ (TOK_TABLE_OR_COL cdouble) 200.0)))) (TOK_WHERE (and (>= (TOK_TABLE_OR_COL cdouble) (- 500)) (< (TOK_TABLE_OR_COL cdouble) (- 199)))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL s1)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL s2))) (TOK_LIMIT 100)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ alltypesorc
+ TableScan
+ alias: alltypesorc
+ Filter Operator
+ predicate:
+ expr: ((cdouble >= (- 500)) and (cdouble < (- 199)))
+ type: boolean
+ Vectorized execution: true
+ Select Operator
+ expressions:
+ expr: (cdouble + 200.0)
+ type: double
+ expr: (cbigint / (cdouble + 200.0))
+ type: double
+ expr: ((cdouble + 200.0) / (cdouble + 200.0))
+ type: double
+ expr: (cbigint / (cdouble + 200.0))
+ type: double
+ expr: (1 / (cdouble + 200.0))
+ type: double
+ expr: (1.2 / (cdouble + 200.0))
+ type: double
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
+ Vectorized execution: true
+ Reduce Output Operator
+ key expressions:
+ expr: _col0
+ type: double
+ expr: _col1
+ type: double
+ sort order: ++
+ tag: -1
+ value expressions:
+ expr: _col0
+ type: double
+ expr: _col1
+ type: double
+ expr: _col2
+ type: double
+ expr: _col3
+ type: double
+ expr: _col4
+ type: double
+ expr: _col5
+ type: double
+ Vectorized execution: true
+ Reduce Operator Tree:
+ Extract
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 100
+
+
+PREHOOK: query: select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: select (cdouble + 200.0) as s1, cbigint / (cdouble + 200.0) as s2, (cdouble + 200.0) / (cdouble + 200.0), cbigint / (cdouble + 200.0), 1 / (cdouble + 200.0), 1.2 / (cdouble + 200.0)
+from alltypesorc where cdouble >= -500 and cdouble < -199 order by s1, s2 limit 100
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-292.0 NULL 1.0 NULL -0.003424657534246575 -0.00410958904109589
+-290.0 NULL 1.0 NULL -0.0034482758620689655 -0.004137931034482759
+-289.0 NULL 1.0 NULL -0.0034602076124567475 -0.004152249134948096
+-281.0 NULL 1.0 NULL -0.0035587188612099642 -0.004270462633451957
+-279.0 NULL 1.0 NULL -0.0035842293906810036 -0.004301075268817204
+-274.0 6888911.518248175 1.0 6888911.518248175 -0.0036496350364963502 -0.00437956204379562
+-273.0 6028764.868131869 1.0 6028764.868131869 -0.003663003663003663 -0.004395604395604396
+-257.0 6404096.53307393 1.0 6404096.53307393 -0.0038910505836575876 -0.004669260700389105
+-250.0 6583411.236 1.0 6583411.236 -0.0040 -0.0048
+-247.0 NULL 1.0 NULL -0.004048582995951417 -0.004858299595141701
+-247.0 -7546669.174089069 1.0 -7546669.174089069 -0.004048582995951417 -0.004858299595141701
+-246.0 NULL 1.0 NULL -0.0040650406504065045 -0.004878048780487805
+-237.0 NULL 1.0 NULL -0.004219409282700422 -0.005063291139240506
+-236.0 NULL 1.0 NULL -0.00423728813559322 -0.005084745762711864
+-229.0 7187130.170305677 1.0 7187130.170305677 -0.004366812227074236 -0.005240174672489083
+-228.0 8278779.631578947 1.0 8278779.631578947 -0.0043859649122807015 -0.005263157894736842
+-225.0 NULL 1.0 NULL -0.0044444444444444444 -0.005333333333333333
+-210.0 -8876320.40952381 1.0 -8876320.40952381 -0.004761904761904762 -0.005714285714285714
+-201.0 NULL 1.0 NULL -0.004975124378109453 -0.005970149253731343
+-199.0 NULL 1.0 NULL -0.005025125628140704 -0.006030150753768844
+-189.0 NULL 1.0 NULL -0.005291005291005291 -0.006349206349206349
+-188.0 NULL 1.0 NULL -0.005319148936170213 -0.006382978723404255
+-184.0 8944852.222826088 1.0 8944852.222826088 -0.005434782608695652 -0.006521739130434782
+-183.0 8993731.196721312 1.0 8993731.196721312 -0.00546448087431694 -0.006557377049180328
+-181.0 NULL 1.0 NULL -0.0055248618784530384 -0.0066298342541436465
+-179.0 NULL 1.0 NULL -0.00558659217877095 -0.0067039106145251395
+-169.0 9738774.01775148 1.0 9738774.01775148 -0.005917159763313609 -0.007100591715976331
+-164.0 NULL 1.0 NULL -0.006097560975609756 -0.007317073170731707
+-161.0 NULL 1.0 NULL -0.006211180124223602 -0.007453416149068323
+-154.0 1.2256894519480519E7 1.0 1.2256894519480519E7 -0.006493506493506494 -0.007792207792207792
+-152.0 NULL 1.0 NULL -0.006578947368421052 -0.007894736842105263
+-148.0 NULL 1.0 NULL -0.006756756756756757 -0.008108108108108109
+-140.0 NULL 1.0 NULL -0.007142857142857143 -0.008571428571428572
+-138.0 NULL 1.0 NULL -0.007246376811594203 -0.008695652173913044
+-137.0 NULL 1.0 NULL -0.0072992700729927005 -0.00875912408759124
+-132.0 NULL 1.0 NULL -0.007575757575757576 -0.00909090909090909
+-129.0 1.2758548906976745E7 1.0 1.2758548906976745E7 -0.007751937984496124 -0.009302325581395349
+-128.0 NULL 1.0 NULL -0.0078125 -0.009375
+-126.0 NULL 1.0 NULL -0.007936507936507936 -0.009523809523809523
+-126.0 -1.4793867349206349E7 1.0 -1.4793867349206349E7 -0.007936507936507936 -0.009523809523809523
+-116.0 NULL 1.0 NULL -0.008620689655172414 -0.010344827586206896
+-113.0 NULL 1.0 NULL -0.008849557522123894 -0.010619469026548672
+-113.0 -1.6495816690265486E7 1.0 -1.6495816690265486E7 -0.008849557522123894 -0.010619469026548672
+-96.0 NULL 1.0 NULL -0.010416666666666666 -0.012499999999999999
+-94.0 -1.9830077510638297E7 1.0 -1.9830077510638297E7 -0.010638297872340425 -0.01276595744680851
+-93.0 NULL 1.0 NULL -0.010752688172043012 -0.012903225806451613
+-77.0 2.4513789038961038E7 1.0 2.4513789038961038E7 -0.012987012987012988 -0.015584415584415584
+-69.0 2.735596747826087E7 1.0 2.735596747826087E7 -0.014492753623188406 -0.017391304347826087
+-62.0 NULL 1.0 NULL -0.016129032258064516 -0.01935483870967742
+-62.0 3.0444544451612905E7 1.0 3.0444544451612905E7 -0.016129032258064516 -0.01935483870967742
+-60.0 NULL 1.0 NULL -0.016666666666666666 -0.02
+-57.0 -3.27022330877193E7 1.0 -3.27022330877193E7 -0.017543859649122806 -0.021052631578947368
+-49.0 3.35888328367347E7 1.0 3.35888328367347E7 -0.02040816326530612 -0.024489795918367346
+-46.0 3.577940889130435E7 1.0 3.577940889130435E7 -0.021739130434782608 -0.02608695652173913
+-38.0 4.3311916026315786E7 1.0 4.3311916026315786E7 -0.02631578947368421 -0.031578947368421054
+-28.0 5.878045746428572E7 1.0 5.878045746428572E7 -0.03571428571428571 -0.04285714285714286
+-28.0 6.741291985714285E7 1.0 6.741291985714285E7 -0.03571428571428571 -0.04285714285714286
+-21.0 8.988389314285715E7 1.0 8.988389314285715E7 -0.047619047619047616 -0.05714285714285714
+-20.0 NULL 1.0 NULL -0.05 -0.06
+-17.0 NULL 1.0 NULL -0.058823529411764705 -0.07058823529411765
+-12.0 -1.5533560716666666E8 1.0 -1.5533560716666666E8 -0.08333333333333333 -0.09999999999999999
+-3.0 NULL 1.0 NULL -0.3333333333333333 -0.39999999999999997
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL
+0.0 NULL NULL NULL NULL NULL

Modified: hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out?rev=1547883&r1=1547882&r2=1547883&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out Wed Dec 4 19:36:32 2013
@@ -3416,13 +3416,13 @@ NULL NULL false 1969-12-31 15:59:58.456
  NULL NULL true 1969-12-31 15:59:58.456 15601.0 -1.0 -630453835 15601 NULL NULL -630453834 630453835 1.0 -1260907669 1.0 -15601.0 NULL -6.30469436E8 1.389 -1824.0 -15601 NULL
  NULL NULL false 1969-12-31 15:59:58.456 15601.0 -1.0 -532065045 15601 NULL NULL -532065044 532065045 1.0 -1064130089 1.0 -15601.0 NULL -5.32080646E8 1.389 -8541.0 -15601 NULL
  NULL NULL false 1969-12-31 15:59:58.456 15601.0 -1.0 -457778616 15601 NULL NULL -457778615 457778616 1.0 -915557231 1.0 -15601.0 NULL -4.57794217E8 1.389 -14074.0 -15601 NULL
-NULL NULL true 1969-12-31 15:59:58.456 15601.0 0.0 -1355080830 15601 NULL NULL -1355080830 1355080830 -0.0 -2710161660 1.0 -15601.0 NULL -1.355096431E9 -Infinity -9172.0 -15601 NULL
-NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -1300968933 15601 NULL NULL -1300968933 1300968933 -0.0 -2601937866 1.0 -15601.0 NULL -1.300984534E9 -Infinity -1543.0 -15601 NULL
-NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -1111841132 15601 NULL NULL -1111841132 1111841132 -0.0 -2223682264 1.0 -15601.0 NULL -1.111856733E9 -Infinity -4665.0 -15601 NULL
-NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -901264012 15601 NULL NULL -901264012 901264012 -0.0 -1802528024 1.0 -15601.0 NULL -9.01279613E8 -Infinity -9843.0 -15601 NULL
-NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -438779645 15601 NULL NULL -438779645 438779645 -0.0 -877559290 1.0 -15601.0 NULL -4.38795246E8 -Infinity -1520.0 -15601 NULL
-NULL NULL true 1969-12-31 15:59:58.456 15601.0 0.0 -203039588 15601 NULL NULL -203039588 203039588 -0.0 -406079176 1.0 -15601.0 NULL -2.03055189E8 -Infinity -8174.0 -15601 NULL
-NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -39854776 15601 NULL NULL -39854776 39854776 -0.0 -79709552 1.0 -15601.0 NULL -3.9870377E7 -Infinity -9822.0 -15601 NULL
+NULL NULL true 1969-12-31 15:59:58.456 15601.0 0.0 -1355080830 15601 NULL NULL -1355080830 1355080830 -0.0 -2710161660 1.0 -15601.0 NULL -1.355096431E9 NULL -9172.0 -15601 NULL
+NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -1300968933 15601 NULL NULL -1300968933 1300968933 -0.0 -2601937866 1.0 -15601.0 NULL -1.300984534E9 NULL -1543.0 -15601 NULL
+NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -1111841132 15601 NULL NULL -1111841132 1111841132 -0.0 -2223682264 1.0 -15601.0 NULL -1.111856733E9 NULL -4665.0 -15601 NULL
+NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -901264012 15601 NULL NULL -901264012 901264012 -0.0 -1802528024 1.0 -15601.0 NULL -9.01279613E8 NULL -9843.0 -15601 NULL
+NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -438779645 15601 NULL NULL -438779645 438779645 -0.0 -877559290 1.0 -15601.0 NULL -4.38795246E8 NULL -1520.0 -15601 NULL
+NULL NULL true 1969-12-31 15:59:58.456 15601.0 0.0 -203039588 15601 NULL NULL -203039588 203039588 -0.0 -406079176 1.0 -15601.0 NULL -2.03055189E8 NULL -8174.0 -15601 NULL
+NULL NULL false 1969-12-31 15:59:58.456 15601.0 0.0 -39854776 15601 NULL NULL -39854776 39854776 -0.0 -79709552 1.0 -15601.0 NULL -3.9870377E7 NULL -9822.0 -15601 NULL
  NULL NULL true 1969-12-31 15:59:58.456 15601.0 1.0 -2136727102 15601 NULL NULL -2136727103 2136727102 -1.0 -4273454205 1.0 -15601.0 NULL -2.136742703E9 -1.389 -14142.0 -15601 NULL
  NULL NULL false 1969-12-31 15:59:58.456 15601.0 1.0 -1972121622 15601 NULL NULL -1972121623 1972121622 -1.0 -3944243245 1.0 -15601.0 NULL -1.972137223E9 -1.389 -14813.0 -15601 NULL
  NULL NULL true 1969-12-31 15:59:58.456 15601.0 1.0 -1025788056 15601 NULL NULL -1025788057 1025788056 -1.0 -2051576113 1.0 -15601.0 NULL -1.025803657E9 -1.389 -6705.0 -15601 NULL

Search Discussions

Related Discussions

Discussion Navigation
viewthread | post
posts ‹ prev | 1 of 1 | next ›
Discussion Overview
groupcommits @
categorieshive, hadoop
postedDec 4, '13 at 7:36p
activeDec 4, '13 at 7:36p
posts1
users1
websitehive.apache.org

1 user in discussion

Ehans: 1 post

People

Translate

site design / logo © 2021 Grokbase