
/tags/release-0.0.0-rc0/hive/external/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.history;

import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QTestUtil.QTestSetup;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
import org.apache.hadoop.hive.ql.history.HiveHistory.TaskInfo;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.tools.LineageInfo;
import org.apache.hadoop.mapred.TextInputFormat;

/**
 * TestHiveHistory.
 */
public class TestHiveHistory extends TestCase {

  static HiveConf conf;

  private static String tmpdir = "/tmp/" + System.getProperty("user.name") + "/";
  private static Path tmppath = new Path(tmpdir);
  private static Hive db;
  private static FileSystem fs;
  private QTestSetup setup;

  /*
   * initialize the tables
   */
  @Override
  protected void setUp() {
    try {
      conf = new HiveConf(HiveHistory.class);
      fs = FileSystem.get(conf);
      if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
        throw new RuntimeException(tmpdir + " exists but is not a directory");
      }

      if (!fs.exists(tmppath)) {
        if (!fs.mkdirs(tmppath)) {
          throw new RuntimeException("Could not make scratch directory "
              + tmpdir);
        }
      }

      setup = new QTestSetup();
      setup.preTest(conf);

      // copy the test files into hadoop if required.
      int i = 0;
      Path[] hadoopDataFile = new Path[2];
      String[] testFiles = {"kv1.txt", "kv2.txt"};
      String testFileDir = "file://"
          + conf.get("test.data.files").replace('\\', '/').replace("c:", "");
      for (String oneFile : testFiles) {
        Path localDataFile = new Path(testFileDir, oneFile);
        hadoopDataFile[i] = new Path(tmppath, oneFile);
        fs.copyFromLocalFile(false, true, localDataFile, hadoopDataFile[i]);
        i++;
      }

      // load the test files into tables
      i = 0;
      db = Hive.get(conf);
      String[] srctables = {"src", "src2"};
      LinkedList<String> cols = new LinkedList<String>();
      cols.add("key");
      cols.add("value");
      for (String src : srctables) {
        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
        db.createTable(src, cols, null, TextInputFormat.class,
            IgnoreKeyTextOutputFormat.class);
        db.loadTable(hadoopDataFile[i], src, false, false);
        i++;
      }
    } catch (Throwable e) {
      e.printStackTrace();
      // Chain the cause so the original failure is not lost.
      throw new RuntimeException("Encountered throwable", e);
    }
  }

  @Override
  protected void tearDown() {
    try {
      setup.tearDown();
    } catch (Exception e) {
      System.out.println("Exception: " + e.getMessage());
      e.printStackTrace();
      System.out.flush();
      fail("Unexpected exception in tearDown");
    }
  }

  /**
   * Check history file output for this query.
   */
  public void testSimpleQuery() {
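    // Presumably constructed only for its class-loading side effect; the
    // instance itself is never used.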
    new LineageInfo();
    try {
      // NOTE: It is critical to do this here so that log4j is reinitialized
      // before any of the other core hive classes are loaded
      SessionState.initHiveLog4j();

      CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
      ss.in = System.in;
      try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
      } catch (UnsupportedEncodingException e) {
        System.exit(3);
      }

      SessionState.start(ss);

      String cmd = "select a.key from src a";
      Driver d = new Driver(conf);
      int ret = d.run(cmd).getResponseCode();
      if (ret != 0) {
        fail("Failed");
      }
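
      // Parse the session's history file and check that exactly one query
      // and one task were recorded for the SELECT above.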
      HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get()
          .getHiveHistory().getHistFileName());
      Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
      Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
      if (jobInfoMap.size() != 1) {
        fail("jobInfo Map size not 1");
      }

      if (taskInfoMap.size() != 1) {
        fail("taskInfo Map size not 1");
      }

      cmd = (String) jobInfoMap.keySet().toArray()[0];
      QueryInfo ji = jobInfoMap.get(cmd);
      if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
        fail("Wrong number of tasks");
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Failed");
    }
  }
}
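
Because the class extends junit.framework.TestCase, it runs under JUnit 3's text runner as well as under an IDE or Ant <junit> task. A minimal standalone entry point is sketched below; the runner class is hypothetical (not part of the original file) and assumes the Hive test classpath and the test.data.files property are already configured.

// Hypothetical launcher, not part of the original test file.
public class TestHiveHistoryRunner {
  public static void main(String[] args) {
    // Executes every test* method in TestHiveHistory and prints a summary.
    junit.textui.TestRunner.run(TestHiveHistory.class);
  }
}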