FAQ
Author: kevinwilfong
Date: Sun Jan 13 02:11:34 2013
New Revision: 1432543

URL: http://svn.apache.org/viewvc?rev=1432543&view=rev
Log:
HIVE-3803. explain dependency should show the dependencies hierarchically in presence of views. (njain via kevinwilfong)

Modified:
     hive/trunk/hbase-handler/src/test/results/positive/hbase_stats.q.out
     hive/trunk/hbase-handler/src/test/results/positive/hbase_stats2.q.out
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
     hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
     hive/trunk/ql/src/test/queries/clientpositive/explain_dependency.q
     hive/trunk/ql/src/test/results/clientnegative/archive1.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive2.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_insert1.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_insert2.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_insert3.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_insert4.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi1.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi2.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi3.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi4.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi5.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi6.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_multi7.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_partspec1.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_partspec2.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_partspec4.q.out
     hive/trunk/ql/src/test/results/clientnegative/archive_partspec5.q.out
     hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
     hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
     hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view2.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out
     hive/trunk/ql/src/test/results/clientnegative/lockneg4.q.out
     hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter_merge.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter_merge_2.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter_merge_stats.q.out
     hive/trunk/ql/src/test/results/clientpositive/alter_view_rename.q.out
     hive/trunk/ql/src/test/results/clientpositive/archive.q.out
     hive/trunk/ql/src/test/results/clientpositive/archive_corrupt.q.out
     hive/trunk/ql/src/test/results/clientpositive/archive_multi.q.out
     hive/trunk/ql/src/test/results/clientpositive/authorization_2.q.out
     hive/trunk/ql/src/test/results/clientpositive/authorization_6.q.out
     hive/trunk/ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
     hive/trunk/ql/src/test/results/clientpositive/auto_join19.q.out
     hive/trunk/ql/src/test/results/clientpositive/auto_join25.q.out
     hive/trunk/ql/src/test/results/clientpositive/auto_join9.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucket3.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucket_groupby.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_1.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_2.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_3.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_4.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_6.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_7.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketcontext_8.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketizedhiveinputformat_auto.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin13.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin7.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
     hive/trunk/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
     hive/trunk/ql/src/test/results/clientpositive/columnstats_partlvl.q.out
     hive/trunk/ql/src/test/results/clientpositive/combine2_hadoop20.q.out
     hive/trunk/ql/src/test/results/clientpositive/combine3.q.out
     hive/trunk/ql/src/test/results/clientpositive/concatenate_inherit_table_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/create_or_replace_view.q.out
     hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out
     hive/trunk/ql/src/test/results/clientpositive/database.q.out
     hive/trunk/ql/src/test/results/clientpositive/database_drop.q.out
     hive/trunk/ql/src/test/results/clientpositive/diff_part_input_formats.q.out
     hive/trunk/ql/src/test/results/clientpositive/escape1.q.out
     hive/trunk/ql/src/test/results/clientpositive/escape2.q.out
     hive/trunk/ql/src/test/results/clientpositive/explain_dependency.q.out
     hive/trunk/ql/src/test/results/clientpositive/fileformat_mix.q.out
     hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
     hive/trunk/ql/src/test/results/clientpositive/filter_join_breaktask2.q.out
     hive/trunk/ql/src/test/results/clientpositive/global_limit.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_sort_6.q.out
     hive/trunk/ql/src/test/results/clientpositive/groupby_sort_7.q.out
     hive/trunk/ql/src/test/results/clientpositive/hook_context_cs.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_auth.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_auto_mult_tables_compact.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_auto_partitioned.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_auto_unused.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_bitmap.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_bitmap_rc.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_compact.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_compact_2.q.out
     hive/trunk/ql/src/test/results/clientpositive/index_stale_partitioned.q.out
     hive/trunk/ql/src/test/results/clientpositive/input23.q.out
     hive/trunk/ql/src/test/results/clientpositive/input24.q.out
     hive/trunk/ql/src/test/results/clientpositive/input25.q.out
     hive/trunk/ql/src/test/results/clientpositive/input26.q.out
     hive/trunk/ql/src/test/results/clientpositive/input28.q.out
     hive/trunk/ql/src/test/results/clientpositive/input39_hadoop20.q.out
     hive/trunk/ql/src/test/results/clientpositive/input40.q.out
     hive/trunk/ql/src/test/results/clientpositive/input41.q.out
     hive/trunk/ql/src/test/results/clientpositive/input42.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part5.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part6.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part7.q.out
     hive/trunk/ql/src/test/results/clientpositive/input_part9.q.out
     hive/trunk/ql/src/test/results/clientpositive/inputddl7.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert1_overwrite_partitions.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert_into2.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert_into5.q.out
     hive/trunk/ql/src/test/results/clientpositive/insert_into6.q.out
     hive/trunk/ql/src/test/results/clientpositive/join14_hadoop20.q.out
     hive/trunk/ql/src/test/results/clientpositive/join26.q.out
     hive/trunk/ql/src/test/results/clientpositive/join28.q.out
     hive/trunk/ql/src/test/results/clientpositive/join32.q.out
     hive/trunk/ql/src/test/results/clientpositive/join33.q.out
     hive/trunk/ql/src/test/results/clientpositive/join9.q.out
     hive/trunk/ql/src/test/results/clientpositive/join_empty.q.out
     hive/trunk/ql/src/test/results/clientpositive/join_map_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/lateral_view_ppd.q.out
     hive/trunk/ql/src/test/results/clientpositive/leftsemijoin.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part1.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part10.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part11.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part12.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part14.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part2.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part3.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part4.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part6.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part7.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part8.q.out
     hive/trunk/ql/src/test/results/clientpositive/load_dyn_part9.q.out
     hive/trunk/ql/src/test/results/clientpositive/lock2.q.out
     hive/trunk/ql/src/test/results/clientpositive/lock3.q.out
     hive/trunk/ql/src/test/results/clientpositive/lock4.q.out
     hive/trunk/ql/src/test/results/clientpositive/louter_join_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/mapjoin1.q.out
     hive/trunk/ql/src/test/results/clientpositive/mapjoin_distinct.q.out
     hive/trunk/ql/src/test/results/clientpositive/mapjoin_hook.q.out
     hive/trunk/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
     hive/trunk/ql/src/test/results/clientpositive/mapjoin_subquery.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge1.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge2.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge3.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge4.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition4.q.out
     hive/trunk/ql/src/test/results/clientpositive/merge_dynamic_partition5.q.out
     hive/trunk/ql/src/test/results/clientpositive/metadataonly1.q.out
     hive/trunk/ql/src/test/results/clientpositive/mi.q.out
     hive/trunk/ql/src/test/results/clientpositive/nonmr_fetch.q.out
     hive/trunk/ql/src/test/results/clientpositive/nullgroup3.q.out
     hive/trunk/ql/src/test/results/clientpositive/nullgroup5.q.out
     hive/trunk/ql/src/test/results/clientpositive/nullinput2.q.out
     hive/trunk/ql/src/test/results/clientpositive/outer_join_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/partInit.q.out
     hive/trunk/ql/src/test/results/clientpositive/partcols1.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_decode_name.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_serde_format.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_special_char.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_vs_table_metadata.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat3.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat5.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat6.q.out
     hive/trunk/ql/src/test/results/clientpositive/partition_wise_fileformat7.q.out
     hive/trunk/ql/src/test/results/clientpositive/pcr.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppd2.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppd_udf_case.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppd_union_view.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppr_pushdown.q.out
     hive/trunk/ql/src/test/results/clientpositive/ppr_pushdown3.q.out
     hive/trunk/ql/src/test/results/clientpositive/protectmode.q.out
     hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
     hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
     hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out
     hive/trunk/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out
     hive/trunk/ql/src/test/results/clientpositive/rcfile_createas1.q.out
     hive/trunk/ql/src/test/results/clientpositive/rcfile_merge1.q.out
     hive/trunk/ql/src/test/results/clientpositive/rcfile_merge2.q.out
     hive/trunk/ql/src/test/results/clientpositive/rcfile_merge3.q.out
     hive/trunk/ql/src/test/results/clientpositive/rcfile_merge4.q.out
     hive/trunk/ql/src/test/results/clientpositive/regex_col.q.out
     hive/trunk/ql/src/test/results/clientpositive/rename_external_partition_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/rename_partition_location.q.out
     hive/trunk/ql/src/test/results/clientpositive/router_join_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
     hive/trunk/ql/src/test/results/clientpositive/sample10.q.out
     hive/trunk/ql/src/test/results/clientpositive/sample8.q.out
     hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook_hadoop20.q.out
     hive/trunk/ql/src/test/results/clientpositive/select_unquote_and.q.out
     hive/trunk/ql/src/test/results/clientpositive/select_unquote_not.q.out
     hive/trunk/ql/src/test/results/clientpositive/select_unquote_or.q.out
     hive/trunk/ql/src/test/results/clientpositive/smb_mapjoin9.q.out
     hive/trunk/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out
     hive/trunk/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out
     hive/trunk/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out
     hive/trunk/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out
     hive/trunk/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out
     hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats10.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats11.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats12.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats13.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats14.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats15.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats2.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats3.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats4.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats6.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats7.q.out
     hive/trunk/ql/src/test/results/clientpositive/stats8.q.out
     hive/trunk/ql/src/test/results/clientpositive/touch.q.out
     hive/trunk/ql/src/test/results/clientpositive/transform_ppr1.q.out
     hive/trunk/ql/src/test/results/clientpositive/transform_ppr2.q.out
     hive/trunk/ql/src/test/results/clientpositive/union22.q.out
     hive/trunk/ql/src/test/results/clientpositive/union25.q.out
     hive/trunk/ql/src/test/results/clientpositive/union26.q.out
     hive/trunk/ql/src/test/results/clientpositive/union_lateralview.q.out
     hive/trunk/ql/src/test/results/clientpositive/union_ppr.q.out
     hive/trunk/ql/src/test/results/clientpositive/updateAccessTime.q.out

Modified: hive/trunk/hbase-handler/src/test/results/positive/hbase_stats.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/positive/hbase_stats.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/positive/hbase_stats.q.out (original)
+++ hive/trunk/hbase-handler/src/test/results/positive/hbase_stats.q.out Sun Jan 13 02:11:34 2013
@@ -94,11 +94,13 @@ POSTHOOK: Lineage: stats_src.key SIMPLE
  POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  PREHOOK: Output: default@stats_part
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: Output: default@stats_part
  POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
@@ -110,11 +112,13 @@ POSTHOOK: Lineage: stats_src.key SIMPLE
  POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  PREHOOK: Output: default@stats_part
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: Output: default@stats_part
  POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
@@ -284,6 +288,7 @@ Storage Desc Params:
   serialization.format 1
  PREHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
@@ -293,6 +298,7 @@ PREHOOK: Output: default@stats_part@ds=2
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
  POSTHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=13

Modified: hive/trunk/hbase-handler/src/test/results/positive/hbase_stats2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/positive/hbase_stats2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/positive/hbase_stats2.q.out (original)
+++ hive/trunk/hbase-handler/src/test/results/positive/hbase_stats2.q.out Sun Jan 13 02:11:34 2013
@@ -94,11 +94,13 @@ POSTHOOK: Lineage: stats_src.key SIMPLE
  POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  PREHOOK: Output: default@stats_part
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='11') compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: Output: default@stats_part
  POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=11
@@ -110,11 +112,13 @@ POSTHOOK: Lineage: stats_src.key SIMPLE
  POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
  PREHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  PREHOOK: Output: default@stats_part
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: query: analyze table stats_part partition(ds='2010-04-08', hr='12') compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: Output: default@stats_part
  POSTHOOK: Output: default@stats_part@ds=2010-04-08/hr=12
@@ -284,6 +288,7 @@ Storage Desc Params:
   serialization.format 1
  PREHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
  PREHOOK: type: QUERY
+PREHOOK: Input: default@stats_part
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  PREHOOK: Input: default@stats_part@ds=2010-04-08/hr=13
@@ -293,6 +298,7 @@ PREHOOK: Output: default@stats_part@ds=2
  PREHOOK: Output: default@stats_part@ds=2010-04-08/hr=13
  POSTHOOK: query: analyze table stats_part partition(ds, hr) compute statistics
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@stats_part
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=11
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=12
  POSTHOOK: Input: default@stats_part@ds=2010-04-08/hr=13

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sun Jan 13 02:11:34 2013
@@ -561,8 +561,8 @@ public class Driver implements CommandPr

        Map<String, Boolean> tableUsePartLevelAuth = new HashMap<String, Boolean>();
        for (ReadEntity read : inputs) {
- if (read.getPartition() != null) {
- Table tbl = read.getTable();
+ Table tbl = read.getTable();
+ if ((read.getPartition() != null) || (tbl.isPartitioned())) {
            String tblName = tbl.getTableName();
            if (tableUsePartLevelAuth.get(tblName) == null) {
              boolean usePartLevelPriv = (tbl.getParameters().get(
@@ -636,34 +636,33 @@ public class Driver implements CommandPr
          }
        }

-
- //cache the results for table authorization
+ // cache the results for table authorization
        Set<String> tableAuthChecked = new HashSet<String>();
        for (ReadEntity read : inputs) {
- Table tbl = null;
+ Table tbl = read.getTable();
          if (read.getPartition() != null) {
- tbl = read.getPartition().getTable();
+ Partition partition = read.getPartition();
+ tbl = partition.getTable();
            // use partition level authorization
            if (tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE) {
- List<String> cols = part2Cols.get(read.getPartition());
+ List<String> cols = part2Cols.get(partition);
              if (cols != null && cols.size() > 0) {
- ss.getAuthorizer().authorize(read.getPartition().getTable(),
- read.getPartition(), cols, op.getInputRequiredPrivileges(),
+ ss.getAuthorizer().authorize(partition.getTable(),
+ partition, cols, op.getInputRequiredPrivileges(),
                    null);
              } else {
- ss.getAuthorizer().authorize(read.getPartition(),
+ ss.getAuthorizer().authorize(partition,
                    op.getInputRequiredPrivileges(), null);
              }
              continue;
            }
- } else if (read.getTable() != null) {
- tbl = read.getTable();
          }

          // if we reach here, it means it needs to do a table authorization
          // check, and the table authorization may already happened because of other
          // partitions
- if (tbl != null && !tableAuthChecked.contains(tbl.getTableName())) {
+ if (tbl != null && !tableAuthChecked.contains(tbl.getTableName()) &&
+ !(tableUsePartLevelAuth.get(tbl.getTableName()) == Boolean.TRUE)) {
            List<String> cols = tab2Cols.get(tbl);
            if (cols != null && cols.size() > 0) {
              ss.getAuthorizer().authorize(tbl, null, cols,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Sun Jan 13 02:11:34 2013
@@ -78,34 +78,34 @@ public class ExplainTask extends Task<Ex

      JSONObject outJSONObject = new JSONObject();
      List<Map<String, String>> inputTableInfo = new ArrayList<Map<String, String>>();
- Set<String> inputPartitions = new HashSet<String>();
- Set<String> inputTables = new HashSet<String>();
- Table table = null;
+ List<Map<String, String>> inputPartitionInfo = new ArrayList<Map<String, String>>();
      for (ReadEntity input: work.getInputs()) {
        switch (input.getType()) {
          case TABLE:
- table = input.getTable();
+ Table table = input.getTable();
+ Map<String, String> tableInfo = new HashMap<String, String>();
+ tableInfo.put("tablename", table.getCompleteName());
+ tableInfo.put("tabletype", table.getTableType().toString());
+ if ((input.getParents() != null) && (!input.getParents().isEmpty())) {
+ tableInfo.put("tableParents", input.getParents().toString());
+ }
+ inputTableInfo.add(tableInfo);
            break;
          case PARTITION:
- inputPartitions.add(input.getPartition().getCompleteName());
- table = input.getPartition().getTable();
+ Map<String, String> partitionInfo = new HashMap<String, String>();
+ partitionInfo.put("partitionName", input.getPartition().getCompleteName());
+ if ((input.getParents() != null) && (!input.getParents().isEmpty())) {
+ partitionInfo.put("partitionParents", input.getParents().toString());
+ }
+ inputPartitionInfo.add(partitionInfo);
            break;
          default:
- table = null;
            break;
        }
-
- if (table != null && !inputTables.contains(table.getCompleteName())) {
- Map<String, String> tableInfo = new HashMap<String, String>();
- tableInfo.put("tablename", table.getCompleteName());
- tableInfo.put("tabletype", table.getTableType().toString());
- inputTableInfo.add(tableInfo);
- inputTables.add(table.getCompleteName());
- }
      }

      outJSONObject.put("input_tables", inputTableInfo);
- outJSONObject.put("input_partitions", inputPartitions);
+ outJSONObject.put("input_partitions", inputPartitionInfo);
      return outJSONObject;
    }


Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java Sun Jan 13 02:11:34 2013
@@ -19,6 +19,8 @@
  package org.apache.hadoop.hive.ql.hooks;

  import java.io.Serializable;
+import java.util.HashSet;
+import java.util.Set;

  import org.apache.hadoop.hive.ql.metadata.Partition;
  import org.apache.hadoop.hive.ql.metadata.Table;
@@ -29,6 +31,13 @@ import org.apache.hadoop.hive.ql.metadat
   */
  public class ReadEntity extends Entity implements Serializable {

+ // Consider a query like: select * from V, where the view V is defined as:
+ // select * from T
+ // The inputs will contain V and T (parent: V)
+
+ // For views, the entities can be nested - by default, entities are at the top level
+ private Set<ReadEntity> parents = null;
+
    /**
     * For serialization only.
     */
@@ -46,8 +55,20 @@ public class ReadEntity extends Entity i
      super(t);
    }

+ private void initParent(ReadEntity parent) {
+ if (parent != null) {
+ this.parents = new HashSet<ReadEntity>();
+ this.parents.add(parent);
+ }
+ }
+
+ public ReadEntity(Table t, ReadEntity parent) {
+ super(t);
+ initParent(parent);
+ }
+
    /**
- * Constructor given a partiton.
+ * Constructor given a partition.
     *
     * @param p
     * The partition that the query reads from.
@@ -56,6 +77,15 @@ public class ReadEntity extends Entity i
      super(p);
    }

+ public ReadEntity(Partition p, ReadEntity parent) {
+ super(p);
+ initParent(parent);
+ }
+
+ public Set<ReadEntity> getParents() {
+ return parents;
+ }
+
    /**
     * Equals function.
     */

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Sun Jan 13 02:11:34 2013
@@ -601,6 +601,29 @@ public final class GenMapRedUtils {
      setTaskPlan(alias_id, topOp, plan, local, opProcCtx, null);
    }

+ private static ReadEntity getParentViewInfo(String alias_id,
+ Map<String, ReadEntity> viewAliasToInput) {
+ String[] aliases = alias_id.split(":");
+
+ String currentAlias = null;
+ ReadEntity currentInput = null;
+ // Find the immediate parent possible.
+ // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
+ // -> implies depends on.
+ // T's parent would be V1
+ for (int pos = 0; pos < aliases.length; pos++) {
+ currentAlias = currentAlias == null ? aliases[pos] : currentAlias + ":" + aliases[pos];
+ ReadEntity input = viewAliasToInput.get(currentAlias);
+ if (input == null) {
+ return currentInput;
+ }
+ currentInput = input;
+ }
+
+ return currentInput;
+ }
+
+
    /**
     * set the current task in the mapredWork.
     *
@@ -703,11 +726,21 @@ public final class GenMapRedUtils {
      boolean isFirstPart = true;
      boolean emptyInput = true;
      boolean singlePartition = (parts.size() == 1);
+
+ // Track the dependencies for the view. Consider a query like: select * from V;
+ // where V is a view of the form: select * from T
+ // The dependencies should include V at depth 0, and T at depth 1 (inferred).
+ ReadEntity parentViewInfo = getParentViewInfo(alias_id, parseCtx.getViewAliasToInput());
+
+ // The table should also be considered a part of inputs, even if the table is a
+ // partitioned table and whether any partition is selected or not
+ PlanUtils.addInput(inputs,
+ new ReadEntity(parseCtx.getTopToTable().get(topOp), parentViewInfo));
      for (Partition part : parts) {
        if (part.getTable().isPartitioned()) {
- inputs.add(new ReadEntity(part));
+ PlanUtils.addInput(inputs, new ReadEntity(part, parentViewInfo));
        } else {
- inputs.add(new ReadEntity(part.getTable()));
+ PlanUtils.addInput(inputs, new ReadEntity(part.getTable(), parentViewInfo));
        }

        // Later the properties have to come from the partition as opposed

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Sun Jan 13 02:11:34 2013
@@ -85,6 +85,7 @@ public class ParseContext {
    // reducer
    private Map<GroupByOperator, Set<String>> groupOpToInputTables;
    private Map<String, PrunedPartitionList> prunedPartitions;
+ private Map<String, ReadEntity> viewAliasToInput;

    /**
     * The lineage information.
@@ -169,7 +170,8 @@ public class ParseContext {
        GlobalLimitCtx globalLimitCtx,
        HashMap<String, SplitSample> nameToSplitSample,
        HashSet<ReadEntity> semanticInputs, List<Task<? extends Serializable>> rootTasks,
- Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner) {
+ Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner,
+ Map<String, ReadEntity> viewAliasToInput) {
      this.conf = conf;
      this.qb = qb;
      this.ast = ast;
@@ -196,6 +198,7 @@ public class ParseContext {
      this.semanticInputs = semanticInputs;
      this.rootTasks = rootTasks;
      this.opToPartToSkewedPruner = opToPartToSkewedPruner;
+ this.viewAliasToInput = viewAliasToInput;
    }

    /**
@@ -578,4 +581,7 @@ public class ParseContext {
      this.opToPartToSkewedPruner = opToPartToSkewedPruner;
    }

+ public Map<String, ReadEntity> getViewAliasToInput() {
+ return viewAliasToInput;
+ }
  }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sun Jan 13 02:11:34 2013
@@ -221,6 +221,13 @@ public class SemanticAnalyzer extends Ba
    private final String autogenColAliasPrfxLbl;
    private final boolean autogenColAliasPrfxIncludeFuncName;

+ // Keep track of view alias to read entity corresponding to the view
+ // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
+ // keeps track of aliases for V3, V3:V2, V3:V2:V1.
+ // This is used when T is added as an input for the query; the parents of T are
+ // derived from the alias V3:V2:V1:T
+ private final Map<String, ReadEntity> viewAliasToInput = new HashMap<String, ReadEntity>();
+
    // Max characters when auto generating the column name with func name
    private static final int AUTOGEN_COLALIAS_PRFX_MAXLENGTH = 20;

@@ -303,7 +310,7 @@ public class SemanticAnalyzer extends Ba
          loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
          listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
          opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
- opToPartToSkewedPruner);
+ opToPartToSkewedPruner, viewAliasToInput);
    }

    @SuppressWarnings("nls")
@@ -948,16 +955,25 @@ public class SemanticAnalyzer extends Ba
    }

    private void getMetaData(QBExpr qbexpr) throws SemanticException {
+ getMetaData(qbexpr, null);
+ }
+
+ private void getMetaData(QBExpr qbexpr, ReadEntity parentInput)
+ throws SemanticException {
      if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
- getMetaData(qbexpr.getQB());
+ getMetaData(qbexpr.getQB(), parentInput);
      } else {
- getMetaData(qbexpr.getQBExpr1());
- getMetaData(qbexpr.getQBExpr2());
+ getMetaData(qbexpr.getQBExpr1(), parentInput);
+ getMetaData(qbexpr.getQBExpr2(), parentInput);
      }
    }

- @SuppressWarnings("nls")
    public void getMetaData(QB qb) throws SemanticException {
+ getMetaData(qb, null);
+ }
+
+ @SuppressWarnings("nls")
+ public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException {
      try {

        LOG.info("Get metadata for source tables");
@@ -966,7 +982,13 @@ public class SemanticAnalyzer extends Ba
        // We have to materialize the table alias list since we might
        // modify it in the middle for view rewrite.
        List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
- Map<String, String> aliasToViewName = new HashMap<String, String>();
+
+ // Keep track of view alias to view name and read entity
+ // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
+ // keeps track of full view name and read entity corresponding to alias V3, V3:V2, V3:V2:V1.
+ // This is needed for tracking the dependencies for inputs, along with their parents.
+ Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo =
+ new HashMap<String, ObjectPair<String, ReadEntity>>();
        for (String alias : tabAliases) {
          String tab_name = qb.getTabNameForAlias(alias);
          Table tab = null;
@@ -1008,10 +1030,12 @@ public class SemanticAnalyzer extends Ba
                  " -> " + fullViewName + ").");
            }
            replaceViewReferenceWithDefinition(qb, tab, tab_name, alias);
- aliasToViewName.put(alias, fullViewName);
            // This is the last time we'll see the Table objects for views, so add it to the inputs
            // now
- inputs.add(new ReadEntity(tab));
+ ReadEntity viewInput = new ReadEntity(tab, parentInput);
+ viewInput = PlanUtils.addInput(inputs, viewInput);
+ aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput));
+ viewAliasToInput.put(getAliasId(alias, qb), viewInput);
            continue;
          }

@@ -1041,12 +1065,14 @@ public class SemanticAnalyzer extends Ba
        LOG.info("Get metadata for subqueries");
        // Go over the subqueries and getMetaData for these
        for (String alias : qb.getSubqAliases()) {
- boolean wasView = aliasToViewName.containsKey(alias);
+ boolean wasView = aliasToViewInfo.containsKey(alias);
+ ReadEntity newParentInput = null;
          if (wasView) {
- viewsExpanded.add(aliasToViewName.get(alias));
+ viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
+ newParentInput = aliasToViewInfo.get(alias).getSecond();
          }
          QBExpr qbexpr = qb.getSubqForAlias(alias);
- getMetaData(qbexpr);
+ getMetaData(qbexpr, newParentInput);
          if (wasView) {
            viewsExpanded.remove(viewsExpanded.size() - 1);
          }
@@ -6961,7 +6987,7 @@ public class SemanticAnalyzer extends Ba
            // (There are no aggregations in a representative query for the group and
            // There is no group by in that representative query) or
            // The data is skewed or
- // The conf variable used to control combining group bys into a signle reducer is false
+ // The conf variable used to control combining group bys into a single reducer is false
            if (commonGroupByDestGroup.size() == 1 ||
                (qbp.getAggregationExprsForClause(firstDest).size() == 0 &&
                getGroupByForClause(qbp, firstDest).size() == 0) ||
@@ -8453,7 +8479,7 @@ public class SemanticAnalyzer extends Ba
          loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
          listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
          opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
- opToPartToSkewedPruner);
+ opToPartToSkewedPruner, viewAliasToInput);

      // Generate table access stats if required
      if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == true) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Sun Jan 13 02:11:34 2013
@@ -25,6 +25,7 @@ import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;
  import java.util.Properties;
+import java.util.Set;

  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;
@@ -35,6 +36,7 @@ import org.apache.hadoop.hive.ql.exec.Co
  import org.apache.hadoop.hive.ql.exec.Operator;
  import org.apache.hadoop.hive.ql.exec.RowSchema;
  import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
  import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
  import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
  import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
@@ -758,4 +760,37 @@ public final class PlanUtils {
      // prevent instantiation
    }

+ // Add the input 'newInput' to the set of inputs for the query.
+ // The input may or may not be already present.
+ // The ReadEntity also contains the parents from which it is derived (only populated
+ // in case of views). The equals method for ReadEntity does not compare the parents
+ // so that the same input with different parents cannot be added twice. If the input
+ // is already present, make sure the parents are added.
+ // Consider the query:
+ // select * from (select * from V2 union all select * from V3) subq;
+ // where both V2 and V3 depend on V1
+ // addInput would be called twice for V1 (one with parent V2 and the other with parent V3).
+ // When addInput is called for the first time for V1, V1 (parent V2) is added to inputs.
+ // When addInput is called for the second time for V1, the input V1 from inputs is picked up,
+ // and its parents are enhanced to include V2 and V3
+ // The inputs will contain: (V2, no parent), (V3, no parent), (V1, parents(V2, V3))
+ public static ReadEntity addInput(Set<ReadEntity> inputs, ReadEntity newInput) {
+ // If the input is already present, make sure the new parent is added to the input.
+ if (inputs.contains(newInput)) {
+ for (ReadEntity input : inputs) {
+ if (input.equals(newInput)) {
+ if ((newInput.getParents() != null) && (!newInput.getParents().isEmpty())) {
+ input.getParents().addAll(newInput.getParents());
+ }
+ return input;
+ }
+ }
+ assert false;
+ } else {
+ inputs.add(newInput);
+ return newInput;
+ }
+ // make compile happy
+ return null;
+ }
  }

Modified: hive/trunk/ql/src/test/queries/clientpositive/explain_dependency.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/explain_dependency.q?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/explain_dependency.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/explain_dependency.q Sun Jan 13 02:11:34 2013
@@ -27,3 +27,8 @@ EXPLAIN DEPENDENCY SELECT * FROM V1;
  EXPLAIN DEPENDENCY SELECT * FROM V2;
  EXPLAIN DEPENDENCY SELECT * FROM V3;
  EXPLAIN DEPENDENCY SELECT * FROM V4;
+
+-- The table should show up in the explain dependency even if none
+-- of the partitions are selected.
+CREATE VIEW V5 as SELECT * FROM srcpart where ds = '10';
+EXPLAIN DEPENDENCY SELECT * FROM V5;

Modified: hive/trunk/ql/src/test/results/clientnegative/archive1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive1.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive2.q.out Sun Jan 13 02:11:34 2013
@@ -16,11 +16,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
  select key, value from srcpart where ds='2008-04-08' and hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
  select key, value from srcpart where ds='2008-04-08' and hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_insert1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_insert1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_insert1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_insert1.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_insert2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_insert2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_insert2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_insert2.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_insert3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_insert3.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_insert3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_insert3.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_insert4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_insert4.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_insert4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_insert4.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi1.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -24,11 +26,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi2.q.out Sun Jan 13 02:11:34 2013
@@ -16,11 +16,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -28,11 +30,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
  select key, value from srcpart where ds='2008-04-08' and hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='12')
  select key, value from srcpart where ds='2008-04-08' and hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi3.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi3.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -24,11 +26,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi4.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi4.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -24,11 +26,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi5.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi5.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -24,11 +26,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi6.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi6.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi6.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi6.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
@@ -24,11 +26,13 @@ POSTHOOK: Lineage: tstsrcpart PARTITION(
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_multi7.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_multi7.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_multi7.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_multi7.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: INSERT OVERWRITE TABLE tstsrcpart PARTITION (ds='2008-04-08', hr='11')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec1.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec2.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec3.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec4.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec4.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/archive_partspec5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/archive_partspec5.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/archive_partspec5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/archive_partspec5.q.out Sun Jan 13 02:11:34 2013
@@ -12,11 +12,13 @@ POSTHOOK: Output: default@srcpart_archiv
  PREHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12', min='00')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  PREHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00
  POSTHOOK: query: INSERT OVERWRITE TABLE srcpart_archived PARTITION (ds='2008-04-08', hr='12', min='00')
  SELECT key, value FROM srcpart WHERE ds='2008-04-08' AND hr='12'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
  POSTHOOK: Output: default@srcpart_archived@ds=2008-04-08/hr=12/min=00
  POSTHOOK: Lineage: srcpart_archived PARTITION(ds=2008-04-08,hr=12,min=00).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_part.q.out Sun Jan 13 02:11:34 2013
@@ -113,10 +113,12 @@ privilege Select
  grantor hive_test_user
  PREHOOK: query: select key, value from authorization_part_fail where ds='2010' order by key limit 20
  PREHOOK: type: QUERY
+PREHOOK: Input: default@authorization_part_fail
  PREHOOK: Input: default@authorization_part_fail@ds=2010
  #### A masked pattern was here ####
  POSTHOOK: query: select key, value from authorization_part_fail where ds='2010' order by key limit 20
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@authorization_part_fail
  POSTHOOK: Input: default@authorization_part_fail@ds=2010
  #### A masked pattern was here ####
  POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).key EXPRESSION [(src_auth)src_auth.FieldSchema(name:key, type:string, comment:null), ]
@@ -190,10 +192,12 @@ privilege Select
  grantor hive_test_user
  PREHOOK: query: select key, value from authorization_part_fail where ds='2011' order by key limit 20
  PREHOOK: type: QUERY
+PREHOOK: Input: default@authorization_part_fail
  PREHOOK: Input: default@authorization_part_fail@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select key, value from authorization_part_fail where ds='2011' order by key limit 20
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@authorization_part_fail
  POSTHOOK: Input: default@authorization_part_fail@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: Lineage: authorization_part_fail PARTITION(ds=2010).key EXPRESSION [(src_auth)src_auth.FieldSchema(name:key, type:string, comment:null), ]
@@ -222,11 +226,13 @@ POSTHOOK: Lineage: authorization_part_fa
  19 val_19
  PREHOOK: query: select key,value, ds from authorization_part_fail where ds>='2010' order by key, ds limit 20
  PREHOOK: type: QUERY
+PREHOOK: Input: default@authorization_part_fail
  PREHOOK: Input: default@authorization_part_fail@ds=2010
  PREHOOK: Input: default@authorization_part_fail@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select key,value, ds from authorization_part_fail where ds>='2010' order by key, ds limit 20
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@authorization_part_fail
  POSTHOOK: Input: default@authorization_part_fail@ds=2010
  POSTHOOK: Input: default@authorization_part_fail@ds=2011
  #### A masked pattern was here ####

Modified: hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view1.q.out Sun Jan 13 02:11:34 2013
@@ -15,9 +15,11 @@ POSTHOOK: Output: default@v
  #### A masked pattern was here ####
  PREHOOK: query: alter view v add partition (ds='1',hr='2')
  PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@v
  POSTHOOK: query: alter view v add partition (ds='1',hr='2')
  POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@v
  POSTHOOK: Output: default@v@ds=1/hr=2
  FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has paritions

Modified: hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view2.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/create_or_replace_view2.q.out Sun Jan 13 02:11:34 2013
@@ -15,9 +15,11 @@ POSTHOOK: Output: default@v
  #### A masked pattern was here ####
  PREHOOK: query: alter view v add partition (ds='1',hr='2')
  PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@v
  POSTHOOK: query: alter view v add partition (ds='1',hr='2')
  POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@v
  POSTHOOK: Output: default@v@ds=1/hr=2
  FAILED: SemanticException [Error 10217]: Cannot replace a view with CREATE VIEW or REPLACE VIEW or ALTER VIEW AS SELECT if the view has paritions

Modified: hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/lockneg3.q.out Sun Jan 13 02:11:34 2013
@@ -10,11 +10,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
  select key, value from srcpart where ds='2008-04-08' and hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
  select key, value from srcpart where ds='2008-04-08' and hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/lockneg4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/lockneg4.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/lockneg4.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/lockneg4.q.out Sun Jan 13 02:11:34 2013
@@ -10,11 +10,13 @@ POSTHOOK: Output: default@tstsrcpart
  PREHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
  select key, value from srcpart where ds='2008-04-08' and hr='11'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
  PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  PREHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: query: insert overwrite table tstsrcpart partition (ds='2008-04-08', hr='11')
  select key, value from srcpart where ds='2008-04-08' and hr='11'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
  POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
  POSTHOOK: Output: default@tstsrcpart@ds=2008-04-08/hr=11
  POSTHOOK: Lineage: tstsrcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]

Modified: hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/protectmode_part1.q.out Sun Jan 13 02:11:34 2013
@@ -33,22 +33,26 @@ POSTHOOK: Output: default@tbl_protectmod
  PREHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p1'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl_protectmode5
  PREHOOK: Input: default@tbl_protectmode5@p=p1
  PREHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p1'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl_protectmode5
  POSTHOOK: Input: default@tbl_protectmode5@p=p1
  POSTHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]
  PREHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p2'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl_protectmode5
  PREHOOK: Input: default@tbl_protectmode5@p=p2
  PREHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p2'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl_protectmode5
  POSTHOOK: Input: default@tbl_protectmode5@p=p2
  POSTHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]
@@ -67,11 +71,13 @@ POSTHOOK: Lineage: tbl_protectmode5_1.co
  PREHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p2'
  PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl_protectmode5
  PREHOOK: Input: default@tbl_protectmode5@p=p2
  PREHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: query: insert overwrite table tbl_protectmode5_1
  select col from tbl_protectmode5 where p='p2'
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl_protectmode5
  POSTHOOK: Input: default@tbl_protectmode5@p=p2
  POSTHOOK: Output: default@tbl_protectmode5_1
  POSTHOOK: Lineage: tbl_protectmode5_1.col SIMPLE [(tbl_protectmode5)tbl_protectmode5.FieldSchema(name:col, type:string, comment:null), ]

Modified: hive/trunk/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out?rev=1432543&r1=1432542&r2=1432543&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_concatenate_indexed_table.q.out Sun Jan 13 02:11:34 2013
@@ -162,19 +162,23 @@ minFileSize:206

  PREHOOK: query: select count(1) from src_rc_concatenate_test_part
  PREHOOK: type: QUERY
+PREHOOK: Input: default@src_rc_concatenate_test_part
  PREHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select count(1) from src_rc_concatenate_test_part
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_rc_concatenate_test_part
  POSTHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  15
  PREHOOK: query: select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part
  PREHOOK: type: QUERY
+PREHOOK: Input: default@src_rc_concatenate_test_part
  PREHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_rc_concatenate_test_part
  POSTHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  214 -7678496319
@@ -215,19 +219,23 @@ minFileSize:239

  PREHOOK: query: select count(1) from src_rc_concatenate_test_part
  PREHOOK: type: QUERY
+PREHOOK: Input: default@src_rc_concatenate_test_part
  PREHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select count(1) from src_rc_concatenate_test_part
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_rc_concatenate_test_part
  POSTHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  15
  PREHOOK: query: select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part
  PREHOOK: type: QUERY
+PREHOOK: Input: default@src_rc_concatenate_test_part
  PREHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  POSTHOOK: query: select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part
  POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src_rc_concatenate_test_part
  POSTHOOK: Input: default@src_rc_concatenate_test_part@ds=2011
  #### A masked pattern was here ####
  214 -7678496319

Search Discussions

Discussion Posts

Previous

Follow ups

Related Discussions

Discussion Navigation
view thread | post
posts ‹ prev | 2 of 7 | next ›
Discussion Overview
group: commits @
categories: hive, hadoop
posted: Jan 13, '13 at 2:12a
active: Jan 13, '13 at 2:13a
posts: 7
users: 1
website: hive.apache.org

1 user in discussion

Kevinwilfong: 7 posts

People

Translate

site design / logo © 2021 Grokbase