
/tags/release-0.0.0-rc0/hive/external/ql/src/test/org/apache/hadoop/hive/ql/history/TestHiveHistory.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.history;

import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QTestUtil.QTestSetup;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
import org.apache.hadoop.hive.ql.history.HiveHistory.TaskInfo;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.tools.LineageInfo;
import org.apache.hadoop.mapred.TextInputFormat;

/**
 * TestHiveHistory.
 *
 */
public class TestHiveHistory extends TestCase {

  static HiveConf conf;

  private static String tmpdir = "/tmp/" + System.getProperty("user.name")
      + "/";
  private static Path tmppath = new Path(tmpdir);
  private static Hive db;
  private static FileSystem fs;
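  // Per-test harness from QTestUtil; its preTest()/tearDown() calls below
  // manage shared test fixtures around each test method.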
  private QTestSetup setup;

  /*
   * initialize the tables
   */

  @Override
  protected void setUp() {
    try {
      conf = new HiveConf(HiveHistory.class);

      fs = FileSystem.get(conf);
      if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
        throw new RuntimeException(tmpdir + " exists but is not a directory");
      }

      if (!fs.exists(tmppath)) {
        if (!fs.mkdirs(tmppath)) {
          throw new RuntimeException("Could not make scratch directory "
              + tmpdir);
        }
      }

      setup = new QTestSetup();
      setup.preTest(conf);

      // copy the test files into hadoop if required.
      int i = 0;
      Path[] hadoopDataFile = new Path[2];
      String[] testFiles = {"kv1.txt", "kv2.txt"};
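      // The replace() calls normalize a Windows-style path (backslashes,
      // drive letter) into a usable file:// URI.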
      String testFileDir = "file://"
          + conf.get("test.data.files").replace('\\', '/').replace("c:", "");
      for (String oneFile : testFiles) {
        Path localDataFile = new Path(testFileDir, oneFile);
        hadoopDataFile[i] = new Path(tmppath, oneFile);
        fs.copyFromLocalFile(false, true, localDataFile, hadoopDataFile[i]);
        i++;
      }

      // load the test files into tables
      i = 0;
      db = Hive.get(conf);
      String[] srctables = {"src", "src2"};
      LinkedList<String> cols = new LinkedList<String>();
      cols.add("key");
      cols.add("value");
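      // Recreate each source table as a two-column (key, value) text table
      // and load the corresponding copied data file into it.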
      for (String src : srctables) {
        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
        db.createTable(src, cols, null, TextInputFormat.class,
            IgnoreKeyTextOutputFormat.class);
        db.loadTable(hadoopDataFile[i], src, false, false);
        i++;
      }

    } catch (Throwable e) {
      e.printStackTrace();
      throw new RuntimeException("Encountered throwable", e);
    }
  }

  @Override
  protected void tearDown() {
    try {
      setup.tearDown();
    } catch (Exception e) {
      System.out.println("Exception: " + e.getMessage());
      e.printStackTrace();
      System.out.flush();
      fail("Unexpected exception in tearDown");
    }
  }

  /**
   * Check history file output for this query.
   */
  public void testSimpleQuery() {
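    // Instantiated only for its side effects; the instance itself is unused.
    // (Presumably this forces the lineage classes to load before the query runs.)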
    new LineageInfo();
    try {

      // NOTE: It is critical to do this here so that log4j is reinitialized
      // before any of the other core hive classes are loaded
      SessionState.initHiveLog4j();

      CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
      ss.in = System.in;
      try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
      } catch (UnsupportedEncodingException e) {
        System.exit(3);
      }

      SessionState.start(ss);

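      // Run a simple query through the Driver; a non-zero response code
      // means compilation or execution failed.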
      String cmd = "select a.key from src a";
      Driver d = new Driver(conf);
      int ret = d.run(cmd).getResponseCode();
      if (ret != 0) {
        fail("Query failed with return code " + ret);
      }
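
      // Parse the history file that this session wrote for the query; the
      // viewer exposes one map of per-query info and one of per-task info.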
      HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get()
          .getHiveHistory().getHistFileName());
      Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
      Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
      if (jobInfoMap.size() != 1) {
        fail("jobInfo Map size not 1");
      }

      if (taskInfoMap.size() != 1) {
        fail("taskInfo Map size not 1");
      }

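      // The history file keys each query by its command string, so look up
      // the QueryInfo for our query and check the recorded task count.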
      cmd = (String) jobInfoMap.keySet().toArray()[0];
      QueryInfo ji = jobInfoMap.get(cmd);

      if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
        fail("Wrong number of tasks");
      }

    } catch (Exception e) {
      e.printStackTrace();
      fail("Unexpected exception: " + e.getMessage());
    }
  }

}