/tags/release-0.0.0-rc0/hive/external/hbase-handler/src/test/results/hbase_bulk.m.out


PREHOOK: query: drop table hbsort
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table hbsort
POSTHOOK: type: DROPTABLE
PREHOOK: query: drop table hbpartition
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table hbpartition
POSTHOOK: type: DROPTABLE
PREHOOK: query: -- this is a dummy table used for controlling how the HFiles are
-- created
create table hbsort(key string, val string, val2 string)
stored as
INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf')
PREHOOK: type: CREATETABLE
POSTHOOK: query: -- this is a dummy table used for controlling how the HFiles are
-- created
create table hbsort(key string, val string, val2 string)
stored as
INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.hbase.HiveHFileOutputFormat'
TBLPROPERTIES ('hfile.family.path' = '/tmp/hbsort/cf')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hbsort
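-- Note (not part of the recorded hook output): with HiveHFileOutputFormat and the
-- 'hfile.family.path' table property, an INSERT OVERWRITE into hbsort writes HFiles
-- for a single column family (cf) under /tmp/hbsort/cf instead of ordinary warehouse
-- files. A minimal sketch of a matching HBase-side table, created from the HBase
-- shell (the table name here is an assumption, not something this test creates):
--
--   create 'hbsort', {NAME => 'cf'}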
PREHOOK: query: -- this is a dummy table used for controlling how the input file
-- for TotalOrderPartitioner is created
create table hbpartition(part_break string)
row format serde
'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
stored as
inputformat
'org.apache.hadoop.mapred.TextInputFormat'
outputformat
'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
PREHOOK: type: CREATETABLE
POSTHOOK: query: -- this is a dummy table used for controlling how the input file
-- for TotalOrderPartitioner is created
create table hbpartition(part_break string)
row format serde
'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
stored as
inputformat
'org.apache.hadoop.mapred.TextInputFormat'
outputformat
'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@hbpartition
PREHOOK: query: -- this should produce one file, but we do not
-- know what it will be called, so we will copy it to a well known
-- filename /tmp/hbpartition.lst
insert overwrite table hbpartition
select distinct value
from src
where value='val_100' or value='val_200'
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@hbpartition
POSTHOOK: query: -- this should produce one file, but we do not
-- know what it will be called, so we will copy it to a well known
-- filename /tmp/hbpartition.lst
insert overwrite table hbpartition
select distinct value
from src
where value='val_100' or value='val_200'
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbpartition
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
1 1 139 hdfs://localhost.localdomain:37780/build/ql/test/data/warehouse/hbpartition
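-- Note (not captured in the hook output above): the "1 1 139 ..." line is dfs -count
-- output (directories, files, content size in bytes) for the hbpartition warehouse
-- directory, confirming the insert produced a single file of range-split keys. The
-- test script would next copy that file to the well-known name /tmp/hbpartition.lst
-- and point the total-order partitioner at it before the hbsort insert; set/dfs
-- commands do not appear in hook output. A sketch of those steps, assuming three
-- reducers and the partitioner properties read by Hive's TotalOrderPartitioner
-- support:
--
--   dfs -cp /build/ql/test/data/warehouse/hbpartition/* /tmp/hbpartition.lst;
--
--   set mapred.reduce.tasks=3;
--   set hive.mapred.partitioner=org.apache.hadoop.mapred.lib.TotalOrderPartitioner;
--   set total.order.partitioner.path=/tmp/hbpartition.lst;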
PREHOOK: query: -- this should produce three files in /tmp/hbsort/cf
-- include some trailing blanks and nulls to make sure we handle them correctly
insert overwrite table hbsort
select distinct value,
case when key=103 then cast(null as string) else key end,
case when key=103 then ''
else cast(key+1 as string) end
from src
cluster by value
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@hbsort
POSTHOOK: query: -- this should produce three files in /tmp/hbsort/cf
-- include some trailing blanks and nulls to make sure we handle them correctly
insert overwrite table hbsort
select distinct value,
case when key=103 then cast(null as string) else key end,
case when key=103 then ''
else cast(key+1 as string) end
from src
cluster by value
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@hbsort
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
1 3 23380 hdfs://localhost.localdomain:37780/tmp/hbsort/cf
PREHOOK: query: -- To get the files out to your local filesystem for loading into
-- HBase, run mkdir -p /tmp/blah/cf, then uncomment and
-- semicolon-terminate the line below before running this test:
-- dfs -copyToLocal /tmp/hbsort/cf/* /tmp/blah/cf
drop table hbsort
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@hbsort
PREHOOK: Output: default@hbsort
POSTHOOK: query: -- To get the files out to your local filesystem for loading into
-- HBase, run mkdir -p /tmp/blah/cf, then uncomment and
-- semicolon-terminate the line below before running this test:
-- dfs -copyToLocal /tmp/hbsort/cf/* /tmp/blah/cf
drop table hbsort
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbsort
POSTHOOK: Output: default@hbsort
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
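-- Note (not part of the recorded output): once the HFiles have been generated under
-- /tmp/hbsort/cf, the step that would follow outside this test is handing them to
-- HBase's bulk loader. A sketch, assuming a target HBase table named 'hbsort' with
-- column family 'cf' already exists:
--
--   hbase org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles /tmp/hbsort hbsort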
PREHOOK: query: drop table hbpartition
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@hbpartition
PREHOOK: Output: default@hbpartition
POSTHOOK: query: drop table hbpartition
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@hbpartition
POSTHOOK: Output: default@hbpartition
POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]