/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.jdbc;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.service.HiveInterface;
import org.apache.hadoop.io.BytesWritable;
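
/**
 * A JDBC ResultSet over the rows of a Hive query. Rows are pulled from the
 * server one at a time through {@link HiveInterface#fetchOne()} and
 * deserialized with a {@link LazySimpleSerDe} configured from the query
 * schema; an empty row string signals the end of the result set.
 */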
public class HiveQueryResultSet extends HiveBaseResultSet {

  public static final Log LOG = LogFactory.getLog(HiveQueryResultSet.class);

  private HiveInterface client;
  private SerDe serde;
  private int maxRows = 0;
  private int rowsFetched = 0;
  private Statement statement;
  public HiveQueryResultSet(HiveInterface client, Statement statement, int maxRows) throws SQLException {
    this(client, maxRows);
    this.statement = statement;
  }

  public HiveQueryResultSet(HiveInterface client, int maxRows) throws SQLException {
    this.client = client;
    this.maxRows = maxRows;
    initSerde();
    // Pre-size the row buffer to match the column count discovered by initSerde().
    this.row = Arrays.asList(new Object[this.columnNames.size()]);
  }

  public HiveQueryResultSet(HiveInterface client, Statement statement) throws SQLException {
    this(client);
    this.statement = statement;
  }

  public HiveQueryResultSet(HiveInterface client) throws SQLException {
    this(client, 0); // maxRows == 0 means no client-side row limit
  }
  /**
   * Fetches the result schema from the Hive server and configures a
   * LazySimpleSerDe to deserialize the delimited row strings returned
   * by fetchOne().
   */
  private void initSerde() throws SQLException {
    try {
      Schema fullSchema = client.getSchema();
      List<FieldSchema> schema = fullSchema.getFieldSchemas();
      columnNames = new ArrayList<String>();
      columnTypes = new ArrayList<String>();
      StringBuilder namesSb = new StringBuilder();
      StringBuilder typesSb = new StringBuilder();
      if (schema != null && !schema.isEmpty()) {
        for (int pos = 0; pos < schema.size(); pos++) {
          if (pos != 0) {
            namesSb.append(",");
            typesSb.append(",");
          }
          FieldSchema field = schema.get(pos);
          columnNames.add(field.getName());
          columnTypes.add(field.getType());
          namesSb.append(field.getName());
          typesSb.append(field.getType());
        }
      }
      String names = namesSb.toString();
      String types = typesSb.toString();

      // LazySimpleSerDe reads the column layout from the standard
      // "columns" / "columns.types" table properties.
      serde = new LazySimpleSerDe();
      Properties props = new Properties();
      if (names.length() > 0) {
        LOG.info("Column names: " + names);
        props.setProperty("columns", names);
      }
      if (types.length() > 0) {
        LOG.info("Column types: " + types);
        props.setProperty("columns.types", types);
      }
      serde.initialize(new Configuration(), props);
    } catch (Exception ex) {
      throw new SQLException("Could not create ResultSet: " + ex.getMessage(), ex);
    }
  }
  public void close() throws SQLException {
    client = null;
  }
  public boolean next() throws SQLException {
    if (maxRows > 0 && rowsFetched >= maxRows) {
      return false;
    }
    String rowStr = "";
    try {
      /** added by shingo.yamagami@ksk-sol.jp **/
      try {
        rowStr = client.fetchOne();
      } catch (Exception exception) {
        // fetchOne() throws when the result set is exhausted; treat it as
        // end-of-data and fall through with the empty sentinel row string.
      }
      /** added by shingo.yamagami@ksk-sol.jp **/
      rowsFetched++;
      if (LOG.isDebugEnabled()) {
        LOG.debug("Fetched row string: " + rowStr);
      }
      if (!"".equals(rowStr)) {
        StructObjectInspector soi = (StructObjectInspector) serde.getObjectInspector();
        List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
        Object data = serde.deserialize(new BytesWritable(rowStr.getBytes()));
        assert row.size() == fieldRefs.size() : row.size() + ", " + fieldRefs.size();
        for (int i = 0; i < fieldRefs.size(); i++) {
          StructField fieldRef = fieldRefs.get(i);
          ObjectInspector oi = fieldRef.getFieldObjectInspector();
          Object obj = soi.getStructFieldData(data, fieldRef);
          row.set(i, convertLazyToJava(obj, oi));
        }
        if (LOG.isDebugEnabled()) {
          LOG.debug("Deserialized row: " + row);
        }
      }
    } catch (Exception ex) {
      throw new SQLException("Error retrieving next row", ex);
    }
    // An empty row string marks the end of the result set.
    return !"".equals(rowStr);
  }
  /**
   * Converts a lazy object produced by the SerDe into a standard Java
   * object; non-primitive (complex) values are rendered as strings.
   */
  private static Object convertLazyToJava(Object o, ObjectInspector oi) {
    Object obj = ObjectInspectorUtils.copyToStandardObject(o, oi, ObjectInspectorCopyOption.JAVA);
    if (obj != null && oi.getCategory() != Category.PRIMITIVE) {
      obj = obj.toString();
    }
    return obj;
  }
  @Override
  public <T> T getObject(int columnIndex, Class<T> type) throws SQLException {
    throw new SQLFeatureNotSupportedException("getObject(int, Class<T>) is not supported");
  }

  @Override
  public <T> T getObject(String columnLabel, Class<T> type) throws SQLException {
    throw new SQLFeatureNotSupportedException("getObject(String, Class<T>) is not supported");
  }
}
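
// A minimal usage sketch (not part of the original source): how this
// ResultSet is typically reached through the HiveServer1 JDBC driver.
// The URL, host/port, and table name below are hypothetical placeholders;
// a running HiveServer1 instance is assumed.
class HiveQueryResultSetExample {
  public static void main(String[] args) throws Exception {
    // Register the HiveServer1 driver shipped in hive-jdbc.
    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
    java.sql.Connection conn =
        java.sql.DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
    java.sql.Statement stmt = conn.createStatement();
    // executeQuery() hands back a HiveQueryResultSet; each next() call
    // fetches and deserializes one row until the server reports no more.
    java.sql.ResultSet rs = stmt.executeQuery("SELECT * FROM src LIMIT 10");
    while (rs.next()) {
      System.out.println(rs.getString(1));
    }
    rs.close();
    stmt.close();
    conn.close();
  }
}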