
/tags/release-0.0.0-rc0/hive/external/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.jdbc;

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.service.HiveInterface;
import org.apache.hadoop.io.BytesWritable;

/**
 * HiveQueryResultSet.
 *
 * A forward-only ResultSet over the rows returned by a Hive query: each call
 * to {@link #next()} fetches one row string from the Hive server and
 * deserializes it into Java objects with a {@link LazySimpleSerDe}.
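 *
 * A rough usage sketch via the JDBC entry points (driver class and URL scheme
 * as used by this driver; exact details may vary by Hive version):
 *
 * <pre>
 *   Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
 *   Connection con =
 *       DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
 *   Statement stmt = con.createStatement();
 *   ResultSet rs = stmt.executeQuery("SELECT key, value FROM src");
 *   while (rs.next()) {       // drives HiveQueryResultSet.next() below
 *     System.out.println(rs.getInt(1) + "\t" + rs.getString(2));
 *   }
 *   rs.close();
 *   con.close();
 * </pre>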
 */
public class HiveQueryResultSet extends HiveBaseResultSet {

  public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class);

  private HiveInterface client;
  private SerDe serde;
  private int maxRows = 0;     // 0 means no client-side limit on rows fetched
  private int rowsFetched = 0;

  public HiveQueryResultSet(HiveInterface client, int maxRows) throws SQLException {
    this.client = client;
    this.maxRows = maxRows;
    initSerde();
    // Pre-allocate a reusable row buffer with one slot per result column;
    // next() overwrites these slots on every fetch.
    row = Arrays.asList(new Object[columnNames.size()]);
  }

  public HiveQueryResultSet(HiveInterface client) throws SQLException {
    this(client, 0);
  }

  /**
   * Instantiate the serde used to deserialize the result rows.
   */
  private void initSerde() throws SQLException {
    try {
      Schema fullSchema = client.getSchema();
      List<FieldSchema> schema = fullSchema.getFieldSchemas();
      columnNames = new ArrayList<String>();
      columnTypes = new ArrayList<String>();
      StringBuilder namesSb = new StringBuilder();
      StringBuilder typesSb = new StringBuilder();

      if ((schema != null) && (!schema.isEmpty())) {
        for (int pos = 0; pos < schema.size(); pos++) {
          if (pos != 0) {
            namesSb.append(",");
            typesSb.append(",");
          }
          columnNames.add(schema.get(pos).getName());
          columnTypes.add(schema.get(pos).getType());
          namesSb.append(schema.get(pos).getName());
          typesSb.append(schema.get(pos).getType());
        }
      }

      String names = namesSb.toString();
      String types = typesSb.toString();
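
      // Configure a LazySimpleSerDe with the comma-separated column names and
      // types from the query schema (the standard LIST_COLUMNS /
      // LIST_COLUMN_TYPES serde properties), so that it can split each
      // delimited row string returned by the server into typed fields.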
      serde = new LazySimpleSerDe();
      Properties props = new Properties();
      if (names.length() > 0) {
        LOG.info("Column names: " + names);
        props.setProperty(Constants.LIST_COLUMNS, names);
      }
      if (types.length() > 0) {
        LOG.info("Column types: " + types);
        props.setProperty(Constants.LIST_COLUMN_TYPES, types);
      }
      serde.initialize(new Configuration(), props);
    } catch (Exception ex) {
      ex.printStackTrace();
      throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
    }
  }

  @Override
  public void close() throws SQLException {
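    // Only drops the local reference to the Hive client; the underlying
    // connection is managed (and closed) by whoever created this result set.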
    client = null;
  }

  /**
   * Moves the cursor down one row from its current position.
   *
   * @see java.sql.ResultSet#next()
   * @throws SQLException
   *           if a database access error occurs.
   */
  public boolean next() throws SQLException {
    if (maxRows > 0 && rowsFetched >= maxRows) {
      return false;
    }
    String rowStr = "";
    try {
      rowStr = (String) client.fetchOne();
      rowsFetched++;
      if (LOG.isDebugEnabled()) {
        LOG.debug("Fetched row string: " + rowStr);
      }
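
      // An empty string from fetchOne() means there are no more rows;
      // anything else is a delimited row that still has to be deserialized.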
      if (!"".equals(rowStr)) {
        StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
        List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
        Object data = serde.deserialize(new BytesWritable(rowStr.getBytes()));

        assert row.size() == fieldRefs.size() : row.size() + ", " + fieldRefs.size();
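        // Copy each deserialized struct field into the shared row buffer,
        // converting the serde's lazy objects into standard Java types.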
        for (int i = 0; i < fieldRefs.size(); i++) {
          StructField fieldRef = fieldRefs.get(i);
          ObjectInspector oi = fieldRef.getFieldObjectInspector();
          Object obj = soi.getStructFieldData(data, fieldRef);
          row.set(i, convertLazyToJava(obj, oi));
        }
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deserialized row: " + row);
        }
      }
    } catch (Exception ex) {
      ex.printStackTrace();
      throw new SQLException("Error retrieving next row", ex);
    }
    // NOTE: fetchOne doesn't throw new SQLException("Method not supported").
    return !"".equals(rowStr);
  }

  /**
   * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
   * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
   *
   * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
   */
  private static Object convertLazyToJava(Object o, ObjectInspector oi) {
    Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA);
    // for now, expose non-primitive as a string
    // TODO: expose non-primitive as a structured object while maintaining JDBC compliance
    if (obj != null && oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
      obj = obj.toString();
    }
    return obj;
  }
}