Commit 8941927d authored by zhu-mingye

## Purpose of the pull request

Fix issues related to Hive JDBC.

## Brief change log

1. Fix a bug when executing multiple SQL statements in Hive JDBC mode (a minimal sketch of the splitting idea follows below).
2. Fix result set data type conversion for Hive JDBC query results.

## Verify this pull request
- Added MultipleSQLTest to the dlink-metadata-hive tests
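For context, the multi-statement fix hinges on splitting the submitted script into individual statements and stripping each trailing semicolon, since Hive's JDBC driver generally fails to parse a statement that still ends with ";". A minimal sketch of that idea, under those assumptions (the `SqlSplitter` class and `splitSql` helper are illustrative and not part of this PR's diff):

```java
import java.util.ArrayList;
import java.util.List;

public class SqlSplitter {

    // Splits a script on ";" and trims each piece. Deliberately naive:
    // it would break on semicolons inside string literals.
    public static List<String> splitSql(String script) {
        List<String> statements = new ArrayList<>();
        for (String part : script.split(";")) {
            String trimmed = part.trim();
            if (!trimmed.isEmpty()) {
                statements.add(trimmed); // no trailing ";" — Hive JDBC rejects it
            }
        }
        return statements;
    }
}
```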
parent 486174b1
......@@ -37,6 +37,7 @@ public interface ITypeConvert {
case "float":
return results.getFloat(columnName);
case "bigint":
return results.getLong(columnName);
case "decimal":
return results.getBigDecimal(columnName);
case "date":
......
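The hunk above adds the `bigint` branch, so Hive BIGINT columns are read with `ResultSet#getLong` instead of falling through to a generic conversion. A self-contained sketch of this kind of type dispatch (the class name and the `default` fallback are illustrative assumptions, not the project's actual code):

```java
import java.sql.ResultSet;
import java.sql.SQLException;

public class TypeConvertSketch {

    // Maps a Hive column type name to the matching typed ResultSet getter.
    public static Object convertValue(ResultSet results, String columnName, String columnType)
            throws SQLException {
        switch (columnType.toLowerCase()) {
            case "float":
                return results.getFloat(columnName);
            case "bigint":                            // the branch this commit adds
                return results.getLong(columnName);   // BIGINT -> long, not String
            case "decimal":
                return results.getBigDecimal(columnName);
            default:
                return results.getString(columnName); // assumed fallback
        }
    }
}
```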
......@@ -21,7 +21,11 @@
            <groupId>com.dlink</groupId>
            <artifactId>dlink-metadata-base</artifactId>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <scope>provided</scope>
        </dependency>
<!--        <dependency>-->
<!--            <groupId>org.apache.hive</groupId>-->
<!--            <artifactId>hive-metastore</artifactId>-->
......
......@@ -6,19 +6,15 @@ import com.dlink.metadata.convert.HiveTypeConvert;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.query.HiveQuery;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import com.dlink.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.sql.*;
import java.util.*;
public class HiveDriver extends AbstractJdbcDriver implements Driver {
......@@ -223,6 +219,72 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
        return createTable.toString();
    }
    @Override
    public int executeUpdate(String sql) throws Exception {
        Asserts.checkNullString(sql, "SQL statement is empty");
        // Strip the trailing semicolon: Hive JDBC cannot parse a statement that ends with ";".
        String querySQL = sql.trim().replaceAll(";$", "");
        int res = 0;
        try (Statement statement = conn.createStatement()) {
            res = statement.executeUpdate(querySQL);
        }
        return res;
    }
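The `replaceAll(";$", "")` call is the load-bearing part: Hive's JDBC driver typically rejects statements that end with a semicolon, so it is stripped before execution. A hypothetical usage (the table names and the connected `driver` instance are assumptions):

```java
// Assumes an initialized, connected HiveDriver named driver.
int res = driver.executeUpdate("insert into odsp.demo_target select * from odsp.demo_source;");
System.out.println("executeUpdate returned: " + res);
```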
    @Override
    public JdbcSelectResult query(String sql, Integer limit) {
        if (Asserts.isNull(limit)) {
            limit = 100;
        }
        JdbcSelectResult result = new JdbcSelectResult();
        List<LinkedHashMap<String, Object>> datas = new ArrayList<>();
        List<Column> columns = new ArrayList<>();
        List<String> columnNameList = new ArrayList<>();
        PreparedStatement preparedStatement = null;
        ResultSet results = null;
        int count = 0;
        try {
            // Strip the trailing semicolon; Hive JDBC rejects it.
            String querySQL = sql.trim().replaceAll(";$", "");
            preparedStatement = conn.prepareStatement(querySQL);
            results = preparedStatement.executeQuery();
            if (Asserts.isNull(results)) {
                result.setSuccess(true);
                close(preparedStatement, results);
                return result;
            }
            ResultSetMetaData metaData = results.getMetaData();
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                columnNameList.add(metaData.getColumnLabel(i));
                Column column = new Column();
                column.setName(metaData.getColumnLabel(i));
                column.setType(metaData.getColumnTypeName(i));
                column.setAutoIncrement(metaData.isAutoIncrement(i));
                column.setNullable(metaData.isNullable(i) != ResultSetMetaData.columnNoNulls);
                column.setJavaType(getTypeConvert().convert(column));
                columns.add(column);
            }
            result.setColumns(columnNameList);
            while (results.next()) {
                LinkedHashMap<String, Object> data = new LinkedHashMap<>();
                for (int i = 0; i < columns.size(); i++) {
                    data.put(columns.get(i).getName(),
                            getTypeConvert().convertValue(results, columns.get(i).getName(), columns.get(i).getType()));
                }
                datas.add(data);
                count++;
                if (count >= limit) {
                    break;
                }
            }
            result.setSuccess(true);
        } catch (Exception e) {
            result.setError(LogUtil.getError(e));
            result.setSuccess(false);
        } finally {
            close(preparedStatement, results);
        }
        // Returning from a finally block swallows exceptions, so finish here instead.
        result.setRowData(datas);
        return result;
    }
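A hypothetical usage of the new `query` method, assuming the usual bean accessors (`isSuccess`, `getColumns`, `getRowData`) on `JdbcSelectResult` and a connected `driver` instance:

```java
// Assumes an initialized, connected HiveDriver named driver.
JdbcSelectResult res = driver.query("select * from odsp.pub_pay_mysql_pay_order;", 10);
if (res.isSuccess()) {
    System.out.println(res.getColumns());           // column labels from ResultSetMetaData
    res.getRowData().forEach(System.out::println);  // each row is a LinkedHashMap<String, Object>
}
```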
    @Override
    public IDBQuery getDBQuery() {
        return new HiveQuery();
......
......@@ -2,12 +2,16 @@ package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * HiveTest
......@@ -105,4 +109,34 @@ public class HiveTest {
            System.out.println(column.getName() + "\t\t" + column.getType() + "\t\t" + column.getComment());
        }
    }
    /**
     * @Author: zhumingye
     * @date: 2022/3/23
     * @Description: Tests executing multiple SQL statements against Hive
     */
    @Test
    public void MultipleSQLTest() throws Exception {
        Driver driver = getDriver();
        String sql = "select\n" +
                "  date_format(create_time,'yyyy-MM') as pay_success_time,\n" +
                "  sum(pay_amount)/100 as amount\n" +
                "from\n" +
                "  odsp.pub_pay_mysql_pay_order\n" +
                "group by date_format(create_time,'yyyy-MM') ;\n" +
                "select\n" +
                "  *\n" +
                "from\n" +
                "  odsp.pub_pay_mysql_pay_order ;";
        JdbcSelectResult selectResult = driver.executeSql(sql, 100);
        for (LinkedHashMap<String, Object> rowDatum : selectResult.getRowData()) {
            Set<Map.Entry<String, Object>> entrySet = rowDatum.entrySet();
            for (Map.Entry<String, Object> entry : entrySet) {
                System.out.println(entry.getKey() + "\t\t" + entry.getValue());
            }
        }
    }
}