
/tags/release-0.2.0-rc0/src/test/org/apache/hcatalog/data/TestDefaultHCatRecord.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hcatalog.data;

import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;

import junit.framework.Assert;
import junit.framework.TestCase;

public class TestDefaultHCatRecord extends TestCase {

    /**
     * Read-your-writes test: serializes the sample records to a file via
     * write(DataOutput), reads them back via readFields(DataInput), and checks
     * that each deserialized record equals the original.
     */
    public void testRYW() throws IOException {

        File f = new File("binary.dat");
        f.delete();
        f.createNewFile();
        f.deleteOnExit();

        OutputStream fileOutStream = new FileOutputStream(f);
        DataOutput outStream = new DataOutputStream(fileOutStream);

        HCatRecord[] recs = getHCatRecords();
        for (int i = 0; i < recs.length; i++) {
            recs[i].write(outStream);
        }
        fileOutStream.flush();
        fileOutStream.close();

        InputStream fInStream = new FileInputStream(f);
        DataInput inpStream = new DataInputStream(fInStream);

        for (int i = 0; i < recs.length; i++) {
            HCatRecord rec = new DefaultHCatRecord();
            rec.readFields(inpStream);
            Assert.assertEquals(recs[i], rec);
        }

        // The stream should be fully consumed once every record has been read back.
        Assert.assertEquals(fInStream.available(), 0);
        fInStream.close();
    }

    public void testCompareTo() {
        HCatRecord[] recs = getHCatRecords();
        Assert.assertEquals(recs[0].compareTo(recs[1]), 0);
    }

    public void testEqualsObject() {
        HCatRecord[] recs = getHCatRecords();
        Assert.assertTrue(recs[0].equals(recs[1]));
    }

    /**
     * Builds four sample records: the first two are identical flat records of
     * primitive wrappers and strings; the last two also carry a list field and
     * a map field, added in different orders.
     */
    private HCatRecord[] getHCatRecords() {

        List<Object> rec_1 = new ArrayList<Object>(8);
        rec_1.add(new Byte("123"));
        rec_1.add(new Short("456"));
        rec_1.add(new Integer(789));
        rec_1.add(new Long(1000L));
        rec_1.add(new Double(5.3D));
        rec_1.add(new String("hcat and hadoop"));
        rec_1.add(null);
        rec_1.add("null");
        HCatRecord tup_1 = new DefaultHCatRecord(rec_1);

        List<Object> rec_2 = new ArrayList<Object>(8);
        rec_2.add(new Byte("123"));
        rec_2.add(new Short("456"));
        rec_2.add(new Integer(789));
        rec_2.add(new Long(1000L));
        rec_2.add(new Double(5.3D));
        rec_2.add(new String("hcat and hadoop"));
        rec_2.add(null);
        rec_2.add("null");
        HCatRecord tup_2 = new DefaultHCatRecord(rec_2);

        List<Object> rec_3 = new ArrayList<Object>(10);
        rec_3.add(new Byte("123"));
        rec_3.add(new Short("456"));
        rec_3.add(new Integer(789));
        rec_3.add(new Long(1000L));
        rec_3.add(new Double(5.3D));
        rec_3.add(new String("hcat and hadoop"));
        rec_3.add(null);
        List<Integer> innerList = new ArrayList<Integer>();
        innerList.add(314);
        innerList.add(007);
        rec_3.add(innerList);
        Map<Short, String> map = new HashMap<Short, String>(3);
        map.put(new Short("2"), "hcat is cool");
        map.put(new Short("3"), "is it?");
        map.put(new Short("4"), "or is it not?");
        rec_3.add(map);
        HCatRecord tup_3 = new DefaultHCatRecord(rec_3);

        List<Object> rec_4 = new ArrayList<Object>(8);
        rec_4.add(new Byte("123"));
        rec_4.add(new Short("456"));
        rec_4.add(new Integer(789));
        rec_4.add(new Long(1000L));
        rec_4.add(new Double(5.3D));
        rec_4.add(new String("hcat and hadoop"));
        rec_4.add(null);
        rec_4.add("null");
        Map<Short, String> map2 = new HashMap<Short, String>(3);
        map2.put(new Short("2"), "hcat is cool");
        map2.put(new Short("3"), "is it?");
        map2.put(new Short("4"), "or is it not?");
        rec_4.add(map2);
        List<Integer> innerList2 = new ArrayList<Integer>();
        innerList2.add(314);
        innerList2.add(007);
        rec_4.add(innerList2);
        HCatRecord tup_4 = new DefaultHCatRecord(rec_4);

        return new HCatRecord[]{tup_1, tup_2, tup_3, tup_4};
    }
}
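
Because the class extends junit.framework.TestCase, these are JUnit 3-style tests: every public void testXxx() method is discovered reflectively. As a rough illustration only (the driver class below is hypothetical and not part of the HCatalog tree, and it assumes junit.jar 3.8.x plus the compiled HCatalog test classes are on the classpath), the suite could be run standalone with JUnit 3's text runner:

// Hypothetical standalone driver, for illustration only (not part of the HCatalog source tree).
// Assumes junit.jar (3.8.x) and the compiled HCatalog test classes are on the classpath.
package org.apache.hcatalog.data;

public class RunTestDefaultHCatRecord {
    public static void main(String[] args) {
        // junit.textui.TestRunner executes every public testXxx() method of the
        // given TestCase subclass and prints a plain-text summary of passes and failures.
        junit.textui.TestRunner.run(TestDefaultHCatRecord.class);
    }
}

Note that testRYW writes a temporary binary.dat into the current working directory and relies on deleteOnExit() to clean it up, so the runner needs write access to that directory.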