
/tags/release-0.0.0-rc0/hive/external/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.io;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.ExecMapper;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit;
import org.apache.hadoop.hive.shims.HadoopShims.InputSplitShim;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

/**
 * CombineHiveRecordReader.
 *
 * Reads a single chunk of a CombineHiveInputSplit: it resolves the chunk's
 * original input format, builds a plain FileSplit for that chunk, and
 * delegates record iteration to the underlying record reader.
 *
 * @param <K> key type produced by the underlying record reader
 * @param <V> value type produced by the underlying record reader
 */
public class CombineHiveRecordReader<K extends WritableComparable, V extends Writable>
    extends HiveContextAwareRecordReader<K, V> {

  private final RecordReader recordReader;

  public CombineHiveRecordReader(InputSplit split, Configuration conf,
      Reporter reporter, Integer partition) throws IOException {
    JobConf job = (JobConf) conf;
    CombineHiveInputSplit hsplit = new CombineHiveInputSplit(job,
        (InputSplitShim) split);

    // Resolve the input format class recorded in the combined split.
    String inputFormatClassName = hsplit.inputFormatClassName();
    Class inputFormatClass = null;
    try {
      inputFormatClass = Class.forName(inputFormatClassName);
    } catch (ClassNotFoundException e) {
      throw new IOException("CombineHiveRecordReader: class not found "
          + inputFormatClassName, e);
    }
    InputFormat inputFormat = HiveInputFormat.getInputFormatFromCache(
        inputFormatClass, job);

    // Create a plain FileSplit for the given partition (chunk) of the
    // combined split and delegate to the original input format's reader.
    FileSplit fsplit = new FileSplit(hsplit.getPaths()[partition], hsplit
        .getStartOffsets()[partition], hsplit.getLengths()[partition], hsplit
        .getLocations());

    this.recordReader = inputFormat.getRecordReader(fsplit, job, reporter);
    this.initIOContext(fsplit, job, inputFormatClass, this.recordReader);
  }

  @Override
  public void doClose() throws IOException {
    recordReader.close();
  }

  public K createKey() {
    return (K) recordReader.createKey();
  }

  public V createValue() {
    return (V) recordReader.createValue();
  }

  public long getPos() throws IOException {
    return recordReader.getPos();
  }

  public float getProgress() throws IOException {
    return recordReader.getProgress();
  }

  @Override
  public boolean doNext(K key, V value) throws IOException {
    // Stop producing rows once the map-side operator tree reports it is done
    // (for example, after a LIMIT has been satisfied).
    if (ExecMapper.getDone()) {
      return false;
    }
    return recordReader.next(key, value);
  }
}
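
For context, a hedged sketch of how this reader is typically driven: CombineHiveInputFormat hands each combined split to a shim-level combine record reader, which instantiates one CombineHiveRecordReader per chunk through the (InputSplit, Configuration, Reporter, Integer) constructor above. The snippet below only illustrates that per-chunk usage; the variables shimSplit, job, and reporter are assumptions standing in for objects supplied by the surrounding task, and are not defined in this file.

// Illustrative sketch only -- not part of this file.
// Assumes `shimSplit` is an InputSplitShim produced by
// CombineHiveInputFormat.getSplits(), and `job`/`reporter` come from the task.
CombineHiveRecordReader<WritableComparable, Writable> reader =
    new CombineHiveRecordReader<WritableComparable, Writable>(shimSplit, job, reporter, 0);

WritableComparable key = reader.createKey();
Writable value = reader.createValue();
while (reader.next(key, value)) { // next() is inherited from HiveContextAwareRecordReader
  // consume one record from chunk 0 of the combined split
}
reader.close();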