
/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java

http://github.com/apache/hive
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.exec.persistence;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Random;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

import org.junit.BeforeClass;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
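/**
 * Tests {@link PTFRowContainer}: loads rows with a six-column
 * (int, string, double, int, string, string) schema, then verifies
 * forward, backward, random, and mixed scans across small and large
 * block sizes.
 */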
@SuppressWarnings("deprecation")
public class TestPTFRowContainer {

  private static final String COL_NAMES = "x,y,z,a,b,v";
  private static final String COL_TYPES = "int,string,double,int,string,string";

  static AbstractSerDe serDe;
  static Configuration cfg;

  @BeforeClass
  public static void setupClass() throws SerDeException {
    cfg = new Configuration();
    serDe = new LazyBinarySerDe();
    Properties p = new Properties();
    p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
        COL_NAMES);
    p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
        COL_TYPES);
    serDe.initialize(cfg, p, null);
  }
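  /**
   * Builds a PTFRowContainer over the shared LazyBinarySerDe; blockSize
   * controls how many rows the container groups into each block.
   */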
  private PTFRowContainer<List<Object>> rowContainer(int blockSize)
      throws SerDeException, HiveException {
    PTFRowContainer<List<Object>> rc = new PTFRowContainer<List<Object>>(blockSize, cfg, null);
    rc.setSerDe(serDe,
        ObjectInspectorUtils.getStandardObjectInspector(serDe.getObjectInspector()));
    rc.setTableDesc(
        PTFRowContainer.createTableDesc((StructObjectInspector) serDe.getObjectInspector()));
    return rc;
  }
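  /**
   * Loads sz rows (column v holding the given pad value) into a container
   * with the given block size, then verifies four access patterns: a full
   * forward scan via first()/next(), a reverse scan via getAt(), 100 random
   * getAt() probes, and getAt() seeks each followed by a short run of
   * next() calls.
   */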
  private void runTest(int sz, int blockSize, String value) throws SerDeException, HiveException {
    List<Object> row;
    PTFRowContainer<List<Object>> rc = rowContainer(blockSize);

    int i;
    for (i = 0; i < sz; i++) {
      row = new ArrayList<Object>();
      row.add(new IntWritable(i));
      row.add(new Text("abc " + i));
      row.add(new DoubleWritable(i));
      row.add(new IntWritable(i));
      row.add(new Text("def " + i));
      row.add(new Text(value));
      rc.addRow(row);
    }

    // test forward scan
    assertEquals(sz, rc.rowCount());
    i = 0;
    row = rc.first();
    while (row != null) {
      assertEquals("abc " + i, row.get(1).toString());
      i++;
      row = rc.next();
    }

    // test backward scan
    row = rc.first();
    for (i = sz - 1; i >= 0; i--) {
      row = rc.getAt(i);
      assertEquals("abc " + i, row.get(1).toString());
    }

    // test random scan
    Random r = new Random(1000L);
    for (i = 0; i < 100; i++) {
      int j = r.nextInt(sz);
      row = rc.getAt(j);
      assertEquals("abc " + j, row.get(1).toString());
    }

    // intersperse getAt and next calls
    for (i = 0; i < 100; i++) {
      int j = r.nextInt(sz);
      row = rc.getAt(j);
      assertEquals("abc " + j, row.get(1).toString());
      for (int k = j + 1; k < j + (blockSize / 4) && k < sz; k++) {
        row = rc.next();
        assertEquals("def " + k, row.get(4).toString());
      }
    }
  }
  private void runTest(int sz, int blockSize) throws SerDeException, HiveException {
    runTest(sz, blockSize, "");
  }
  @Test
  public void testLargeBlockSize() throws SerDeException, HiveException {
    runTest(100 * 1000, 25 * 1000);
  }

  @Test
  public void testSmallBlockSize() throws SerDeException, HiveException {
    runTest(10 * 1000, 5);
  }
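  // Pads column v with a string as long as the local file system's default
  // block size, so each two-row container block exceeds a single file split.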
  @Test
  public void testBlocksLargerThanSplit() throws SerDeException, HiveException, IOException {
    runTest(5, 2, new String(new char[(int) FileSystem.getLocal(cfg).getDefaultBlockSize()]));
  }
}