
/tags/release-0.0.0-rc0/hive/external/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out

PREHOOK: query: drop table dest1
PREHOOK: type: DROPTABLE
POSTHOOK: query: drop table dest1
POSTHOOK: type: DROPTABLE
PREHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
PREHOOK: type: CREATETABLE
POSTHOOK: query: CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: default@dest1
PREHOOK: query: EXPLAIN
FROM (
  FROM src
  SELECT TRANSFORM(cast(src.key as tinyint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
  USING '/bin/cat'
  AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
  WHERE key < 100
) tmap
INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue ORDER by tkey, tvalue
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN
FROM (
  FROM src
  SELECT TRANSFORM(cast(src.key as tinyint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
  USING '/bin/cat'
  AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
  WHERE key < 100
) tmap
INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue ORDER by tkey, tvalue
POSTHOOK: type: QUERY
ABSTRACT SYNTAX TREE:
  (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (TOK_FUNCTION TOK_TINYINT (. (TOK_TABLE_OR_COL src) key)) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter') '/bin/cat' (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe')) (TOK_RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader') (TOK_ALIASLIST tkey tvalue)))) (TOK_WHERE (< (TOK_TABLE_OR_COL key) 100)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME dest1))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tkey)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL tvalue)))))

STAGE DEPENDENCIES:
  Stage-1 is a root stage
  Stage-0 depends on stages: Stage-1
  Stage-2 depends on stages: Stage-0

STAGE PLANS:
  Stage: Stage-1
    Map Reduce
      Alias -> Map Operator Tree:
        tmap:src 
          TableScan
            alias: src
            Filter Operator
              predicate:
                  expr: (key < 100)
                  type: boolean
              Filter Operator
                predicate:
                    expr: (key < 100)
                    type: boolean
                Select Operator
                  expressions:
                        expr: UDFToByte(key)
                        type: tinyint
                        expr: value
                        type: string
                  outputColumnNames: _col0, _col1
                  Transform Operator
                    command: /bin/cat
                    output info:
                        input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                    Select Operator
                      expressions:
                            expr: _col0
                            type: string
                            expr: _col1
                            type: string
                      outputColumnNames: _col0, _col1
                      Reduce Output Operator
                        key expressions:
                              expr: _col0
                              type: string
                              expr: _col1
                              type: string
                        sort order: ++
                        tag: -1
                        value expressions:
                              expr: _col0
                              type: string
                              expr: _col1
                              type: string
      Reduce Operator Tree:
        Extract
          File Output Operator
            compressed: false
            GlobalTableId: 1
            table:
                input format: org.apache.hadoop.mapred.TextInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                name: default.dest1

  Stage: Stage-0
    Move Operator
      tables:
          replace: true
          table:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest1

  Stage: Stage-2
    Stats-Aggr Operator


PREHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(cast(src.key as tinyint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
  USING '/bin/cat'
  AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
  WHERE key < 100
) tmap
INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue ORDER by tkey, tvalue
PREHOOK: type: QUERY
PREHOOK: Input: default@src
PREHOOK: Output: default@dest1
POSTHOOK: query: FROM (
  FROM src
  SELECT TRANSFORM(cast(src.key as tinyint), src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDWRITER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordWriter'
  USING '/bin/cat'
  AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.TypedBytesSerDe'
  RECORDREADER 'org.apache.hadoop.hive.contrib.util.typedbytes.TypedBytesRecordReader'
  WHERE key < 100
) tmap
INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue ORDER by tkey, tvalue
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
PREHOOK: query: SELECT dest1.* FROM dest1
PREHOOK: type: QUERY
PREHOOK: Input: default@dest1
PREHOOK: Output: file:/tmp/sdong/hive_2011-02-16_20-10-19_325_182915218311720960/-mr-10000
POSTHOOK: query: SELECT dest1.* FROM dest1
POSTHOOK: type: QUERY
POSTHOOK: Input: default@dest1
POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-16_20-10-19_325_182915218311720960/-mr-10000
POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
0	val_0
0	val_0
0	val_0
10	val_10
11	val_11
12	val_12
12	val_12
15	val_15
15	val_15
17	val_17
18	val_18
18	val_18
19	val_19
2	val_2
20	val_20
24	val_24
24	val_24
26	val_26
26	val_26
27	val_27
28	val_28
30	val_30
33	val_33
34	val_34
35	val_35
35	val_35
35	val_35
37	val_37
37	val_37
4	val_4
41	val_41
42	val_42
42	val_42
43	val_43
44	val_44
47	val_47
5	val_5
5	val_5
5	val_5
51	val_51
51	val_51
53	val_53
54	val_54
57	val_57
58	val_58
58	val_58
64	val_64
65	val_65
66	val_66
67	val_67
67	val_67
69	val_69
70	val_70
70	val_70
70	val_70
72	val_72
72	val_72
74	val_74
76	val_76
76	val_76
77	val_77
78	val_78
8	val_8
80	val_80
82	val_82
83	val_83
83	val_83
84	val_84
84	val_84
85	val_85
86	val_86
87	val_87
9	val_9
90	val_90
90	val_90
90	val_90
92	val_92
95	val_95
95	val_95
96	val_96
97	val_97
97	val_97
98	val_98
98	val_98
PREHOOK: query: drop table dest1
PREHOOK: type: DROPTABLE
PREHOOK: Input: default@dest1
PREHOOK: Output: default@dest1
POSTHOOK: query: drop table dest1
POSTHOOK: type: DROPTABLE
POSTHOOK: Input: default@dest1
POSTHOOK: Output: default@dest1
POSTHOOK: Lineage: dest1.key SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]
POSTHOOK: Lineage: dest1.value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.FieldSchema(name:value, type:string, comment:default), ]