PageRenderTime 33ms CodeModel.GetById 18ms app.highlight 11ms RepoModel.GetById 2ms app.codeStats 0ms

/tags/release-0.0.0-rc0/hive/external/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveQueryResultSet.java

#
Java | 181 lines | 118 code | 23 blank | 40 comment | 19 complexity | 33f0902f88c35c006240931809682e1a MD5 | raw file
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
 18
 19package org.apache.hadoop.hive.jdbc;
 20
 21import java.sql.SQLException;
 22import java.util.ArrayList;
 23import java.util.Arrays;
 24import java.util.List;
 25import java.util.Properties;
 26
 27import org.apache.commons.logging.Log;
 28import org.apache.commons.logging.LogFactory;
 29import org.apache.hadoop.conf.Configuration;
 30import org.apache.hadoop.hive.metastore.api.FieldSchema;
 31import org.apache.hadoop.hive.metastore.api.Schema;
 32import org.apache.hadoop.hive.serde.Constants;
 33import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 34import org.apache.hadoop.hive.serde2.SerDe;
 35import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 36import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 37import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 38import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 39import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 40import org.apache.hadoop.hive.service.HiveInterface;
 41import org.apache.hadoop.io.BytesWritable;
 42
 43/**
 44 * HiveQueryResultSet.
 45 *
 46 */
 47public class HiveQueryResultSet extends HiveBaseResultSet {
 48
 49  public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class);
 50
 51  private HiveInterface client;
 52  private SerDe serde;
 53
 54  private int maxRows = 0;
 55  private int rowsFetched = 0;
 56
 57  public HiveQueryResultSet(HiveInterface client, int maxRows) throws SQLException {
 58    this.client = client;
 59    this.maxRows = maxRows;
 60    initSerde();
 61    row = Arrays.asList(new Object[columnNames.size()]);
 62  }
 63
 64  public HiveQueryResultSet(HiveInterface client) throws SQLException {
 65    this(client, 0);
 66  }
 67
 68  /**
 69   * Instantiate the serde used to deserialize the result rows.
 70   */
 71  private void initSerde() throws SQLException {
 72    try {
 73      Schema fullSchema = client.getSchema();
 74      List<FieldSchema> schema = fullSchema.getFieldSchemas();
 75      columnNames = new ArrayList<String>();
 76      columnTypes = new ArrayList<String>();
 77      StringBuilder namesSb = new StringBuilder();
 78      StringBuilder typesSb = new StringBuilder();
 79
 80      if ((schema != null) && (!schema.isEmpty())) {
 81        for (int pos = 0; pos < schema.size(); pos++) {
 82          if (pos != 0) {
 83            namesSb.append(",");
 84            typesSb.append(",");
 85          }
 86          columnNames.add(schema.get(pos).getName());
 87          columnTypes.add(schema.get(pos).getType());
 88          namesSb.append(schema.get(pos).getName());
 89          typesSb.append(schema.get(pos).getType());
 90        }
 91      }
 92      String names = namesSb.toString();
 93      String types = typesSb.toString();
 94
 95      serde = new LazySimpleSerDe();
 96      Properties props = new Properties();
 97      if (names.length() > 0) {
 98        LOG.info("Column names: " + names);
 99        props.setProperty(Constants.LIST_COLUMNS, names);
100      }
101      if (types.length() > 0) {
102        LOG.info("Column types: " + types);
103        props.setProperty(Constants.LIST_COLUMN_TYPES, types);
104      }
105      serde.initialize(new Configuration(), props);
106
107    } catch (Exception ex) {
108      ex.printStackTrace();
109      throw new SQLException("Could not create ResultSet: " + ex.getMessage());
110    }
111  }
112
113  @Override
114  public void close() throws SQLException {
115    client = null;
116  }
117
118  /**
119   * Moves the cursor down one row from its current position.
120   *
121   * @see java.sql.ResultSet#next()
122   * @throws SQLException
123   *           if a database access error occurs.
124   */
125  public boolean next() throws SQLException {
126    if (maxRows > 0 && rowsFetched >= maxRows) {
127      return false;
128    }
129
130    String rowStr = "";
131    try {
132      rowStr = (String) client.fetchOne();
133      rowsFetched++;
134      if (LOG.isDebugEnabled()) {
135        LOG.debug("Fetched row string: " + rowStr);
136      }
137
138      if (!"".equals(rowStr)) {
139        StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
140        List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
141        Object data = serde.deserialize(new BytesWritable(rowStr.getBytes()));
142
143        assert row.size() == fieldRefs.size() : row.size() + ", " + fieldRefs.size();
144        for (int i = 0; i < fieldRefs.size(); i++) {
145          StructField fieldRef = fieldRefs.get(i);
146          ObjectInspector oi = fieldRef.getFieldObjectInspector();
147          Object obj = soi.getStructFieldData(data, fieldRef);
148          row.set(i, convertLazyToJava(obj, oi));
149        }
150
151        if (LOG.isDebugEnabled()) {
152          LOG.debug("Deserialized row: " + row);
153        }
154      }
155
156    } catch (Exception ex) {
157      ex.printStackTrace();
158      throw new SQLException("Error retrieving next row");
159    }
160    // NOTE: fetchOne dosn't throw new SQLException("Method not supported").
161    return !"".equals(rowStr);
162  }
163
164  /**
165   * Convert a LazyObject to a standard Java object in compliance with JDBC 3.0 (see JDBC 3.0
166   * Specification, Table B-3: Mapping from JDBC Types to Java Object Types).
167   *
168   * This method is kept consistent with {@link HiveResultSetMetaData#hiveTypeToSqlType}.
169   */
170  private static Object convertLazyToJava(Object o, ObjectInspector oi) {
171    Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA);
172
173    // for now, expose non-primitive as a string
174    // TODO: expose non-primitive as a structured object while maintaining JDBC compliance
175    if (obj != null && oi.getCategory() != ObjectInspector.Category.PRIMITIVE) {
176      obj = obj.toString();
177    }
178
179    return obj;
180  }
181}