
/tags/release-0.0.0-rc0/hive/external/ql/src/java/org/apache/hadoop/hive/ql/exec/BinaryRecordReader.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;

/**
 * Read from a binary stream and treat each 1000 bytes (configurable via
 * hive.binary.record.max.length) as a record. The last record before the
 * end of stream can have less than 1000 bytes.
 */
public class BinaryRecordReader implements RecordReader {

  private InputStream in;
  private BytesWritable bytes;
  private int maxRecordLength;

  public void initialize(InputStream in, Configuration conf, Properties tbl) throws IOException {
    this.in = in;
    maxRecordLength = conf.getInt("hive.binary.record.max.length", 1000);
  }

  public Writable createRow() throws IOException {
    bytes = new BytesWritable();
    bytes.setCapacity(maxRecordLength);
    return bytes;
  }

  public int next(Writable row) throws IOException {
    int recordLength = in.read(bytes.get(), 0, maxRecordLength);
    if (recordLength >= 0) {
      bytes.setSize(recordLength);
    }
    return recordLength;
  }

  public void close() throws IOException {
    if (in != null) {
      in.close();
    }
  }
}
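
For reference, a minimal sketch of how this reader might be driven outside of Hive's script operator, assuming the class above and its org.apache.hadoop.hive.ql.exec.RecordReader contract are on the classpath. The demo class name, the in-memory input stream, and the 4-byte record length are made up for illustration; Hive itself wires the stream and Configuration in for you.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.exec.BinaryRecordReader;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;

// Hypothetical standalone driver, not part of Hive.
public class BinaryRecordReaderDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Use 4-byte records instead of the 1000-byte default.
    conf.setInt("hive.binary.record.max.length", 4);

    // Six bytes of input: one full 4-byte record, then a trailing 2-byte record.
    InputStream in = new ByteArrayInputStream(new byte[] {1, 2, 3, 4, 5, 6});

    BinaryRecordReader reader = new BinaryRecordReader();
    reader.initialize(in, conf, new Properties());

    Writable row = reader.createRow();
    int n;
    // next() returns the number of bytes read, or -1 at end of stream.
    while ((n = reader.next(row)) >= 0) {
      System.out.println("record of " + n + " bytes, size=" + ((BytesWritable) row).getLength());
    }
    reader.close();
  }
}

Note that createRow() hands back a single reusable BytesWritable; next() overwrites its contents on every call, so a caller that needs to keep a record around must copy the bytes out before advancing.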