zhaowei / dlink · Commit da2f4b61

Commit da2f4b61 (unverified), authored Mar 23, 2022 by aiwenmo, committed by GitHub on Mar 23, 2022.

[Fix-224] [metadata-hive] Fix HiveJDBC Multiple SQL Query

Parents: 486174b1, 8941927d

Showing 4 changed files with 110 additions and 9 deletions:

  dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/convert/ITypeConvert.java    +1   -0
  dlink-metadata/dlink-metadata-hive/pom.xml                                                       +5   -1
  dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/driver/HiveDriver.java      +70   -8
  dlink-metadata/dlink-metadata-hive/src/test/java/com/dlink/metadata/HiveTest.java               +34   -0
dlink-metadata/dlink-metadata-base/src/main/java/com/dlink/metadata/convert/ITypeConvert.java

@@ -37,6 +37,7 @@ public interface ITypeConvert {
             case "float":
                 return results.getFloat(columnName);
             case "bigint":
+                return results.getLong(columnName);
             case "decimal":
                 return results.getBigDecimal(columnName);
             case "date":
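The one-line addition above matters because Java switch cases fall through when a case has no return or break of its own: before this fix, "bigint" fell into the "decimal" branch and the value came back as a BigDecimal instead of a Long. A minimal standalone sketch of that fall-through behavior (buggyRead below is a hypothetical stand-in for the interface's convertValue, not code from the repository):

import java.math.BigDecimal;

public class FallThroughDemo {
    // Mimics the pre-fix switch: "bigint" has no return of its own,
    // so control falls through into the "decimal" branch.
    static Object buggyRead(String type, long value) {
        switch (type) {
            case "bigint":                             // no return here before the fix
            case "decimal":
                return BigDecimal.valueOf(value);      // bigint wrongly boxed as BigDecimal
            default:
                return value;                          // autoboxed to Long
        }
    }

    public static void main(String[] args) {
        // Prints "BigDecimal" rather than "Long" for a bigint column.
        System.out.println(buggyRead("bigint", 42L).getClass().getSimpleName());
    }
}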
dlink-metadata/dlink-metadata-hive/pom.xml

@@ -21,7 +21,11 @@
             <groupId>com.dlink</groupId>
             <artifactId>dlink-metadata-base</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>druid-spring-boot-starter</artifactId>
+            <scope>provided</scope>
+        </dependency>
<!--        <dependency>-->
<!--            <groupId>org.apache.hive</groupId>-->
<!--            <artifactId>hive-metastore</artifactId>-->
dlink-metadata/dlink-metadata-hive/src/main/java/com/dlink/metadata/driver/HiveDriver.java

@@ -6,19 +6,15 @@ import com.dlink.metadata.convert.HiveTypeConvert;
 import com.dlink.metadata.convert.ITypeConvert;
 import com.dlink.metadata.query.HiveQuery;
 import com.dlink.metadata.query.IDBQuery;
+import com.dlink.metadata.result.JdbcSelectResult;
 import com.dlink.model.Column;
 import com.dlink.model.Schema;
 import com.dlink.model.Table;
+import com.dlink.utils.LogUtil;
 import org.apache.commons.lang3.StringUtils;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.sql.*;
+import java.util.*;

 public class HiveDriver extends AbstractJdbcDriver implements Driver {

@@ -223,6 +219,72 @@ public class HiveDriver extends AbstractJdbcDriver implements Driver {
         return createTable.toString();
     }

+    @Override
+    public int executeUpdate(String sql) throws Exception {
+        Asserts.checkNullString(sql, "Sql 语句为空"); // message: "SQL statement is empty"
+        String querySQL = sql.trim().replaceAll(";$", "");
+        int res = 0;
+        try (Statement statement = conn.createStatement()) {
+            res = statement.executeUpdate(querySQL);
+        }
+        return res;
+    }
+
+    @Override
+    public JdbcSelectResult query(String sql, Integer limit) {
+        if (Asserts.isNull(limit)) {
+            limit = 100;
+        }
+        JdbcSelectResult result = new JdbcSelectResult();
+        List<LinkedHashMap<String, Object>> datas = new ArrayList<>();
+        List<Column> columns = new ArrayList<>();
+        List<String> columnNameList = new ArrayList<>();
+        PreparedStatement preparedStatement = null;
+        ResultSet results = null;
+        int count = 0;
+        try {
+            String querySQL = sql.trim().replaceAll(";$", "");
+            preparedStatement = conn.prepareStatement(querySQL);
+            results = preparedStatement.executeQuery();
+            if (Asserts.isNull(results)) {
+                result.setSuccess(true);
+                close(preparedStatement, results);
+                return result;
+            }
+            ResultSetMetaData metaData = results.getMetaData();
+            for (int i = 1; i <= metaData.getColumnCount(); i++) {
+                columnNameList.add(metaData.getColumnLabel(i));
+                Column column = new Column();
+                column.setName(metaData.getColumnLabel(i));
+                column.setType(metaData.getColumnTypeName(i));
+                column.setAutoIncrement(metaData.isAutoIncrement(i));
+                column.setNullable(metaData.isNullable(i) == 0 ? false : true);
+                column.setJavaType(getTypeConvert().convert(column));
+                columns.add(column);
+            }
+            result.setColumns(columnNameList);
+            while (results.next()) {
+                LinkedHashMap<String, Object> data = new LinkedHashMap<>();
+                for (int i = 0; i < columns.size(); i++) {
+                    data.put(columns.get(i).getName(), getTypeConvert().convertValue(results, columns.get(i).getName(), columns.get(i).getType()));
+                }
+                datas.add(data);
+                count++;
+                if (count >= limit) {
+                    break;
+                }
+            }
+            result.setSuccess(true);
+        } catch (Exception e) {
+            result.setError(LogUtil.getError(e));
+            result.setSuccess(false);
+        } finally {
+            close(preparedStatement, results);
+            result.setRowData(datas);
+            return result;
+        }
+    }
+
     @Override
     public IDBQuery getDBQuery() {
         return new HiveQuery();
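Both new overrides trim the statement and strip a single trailing semicolon (replaceAll(";$", "")) before execution, because Hive's JDBC driver typically rejects statements that end in ';'. Splitting a multi-statement script into individual statements is not part of this diff (it happens upstream, in the executeSql path these overrides serve); the sketch below only illustrates the idea with a hypothetical splitStatements helper, assuming naive splitting on ';' — real splitting must also respect quoted strings and comments:

import java.util.ArrayList;
import java.util.List;

public class MultiStatementSketch {
    // Hypothetical helper: split a script on ';' and drop empty pieces.
    // A sketch only; production splitting must handle ';' inside quotes/comments.
    static List<String> splitStatements(String script) {
        List<String> statements = new ArrayList<>();
        for (String piece : script.split(";")) {
            if (!piece.trim().isEmpty()) {
                statements.add(piece.trim());
            }
        }
        return statements;
    }

    public static void main(String[] args) {
        String script = "select 1 ;\nselect 2 ;";
        for (String stmt : splitStatements(script)) {
            // No trailing ';' reaches Hive, matching what the new overrides expect.
            System.out.println("would execute: [" + stmt + "]");
        }
    }
}

With each piece stripped of its terminator, statement.executeUpdate(querySQL) and conn.prepareStatement(querySQL) each receive exactly one well-formed statement at a time.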
dlink-metadata/dlink-metadata-hive/src/test/java/com/dlink/metadata/HiveTest.java

@@ -2,12 +2,16 @@ package com.dlink.metadata;
 import com.dlink.metadata.driver.Driver;
 import com.dlink.metadata.driver.DriverConfig;
+import com.dlink.metadata.result.JdbcSelectResult;
 import com.dlink.model.Column;
 import com.dlink.model.Schema;
 import com.dlink.model.Table;
 import org.junit.Test;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;

 /**
  * MysqlTest

@@ -105,4 +109,34 @@ public class HiveTest {
             System.out.println(column.getName() + "\t\t" + column.getType() + "\t\t" + column.getComment());
         }
     }
+
+    /**
+     * @Author: zhumingye
+     * @date: 2022/3/23
+     * @Description: Test executing multiple Hive SQL statements
+     * @Param:
+     * @return:
+     */
+    @Test
+    public void MultipleSQLTest() throws Exception {
+        Driver driver = getDriver();
+        String sql = "select\n" +
+                " date_format(create_time,'yyyy-MM') as pay_success_time,\n" +
+                " sum(pay_amount)/100 as amount\n" +
+                "from\n" +
+                " odsp.pub_pay_mysql_pay_order\n" +
+                " group by date_format(create_time,'yyyy-MM') ;\n" +
+                "select\n" +
+                " *\n" +
+                "from\n" +
+                " odsp.pub_pay_mysql_pay_order ;";
+        JdbcSelectResult selectResult = driver.executeSql(sql, 100);
+        for (LinkedHashMap<String, Object> rowDatum : selectResult.getRowData()) {
+            Set<Map.Entry<String, Object>> entrySet = rowDatum.entrySet();
+            for (Map.Entry<String, Object> stringObjectEntry : entrySet) {
+                System.out.println(stringObjectEntry.getKey() + "\t\t" + stringObjectEntry.getValue());
+            }
+        }
+    }
 }
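The new MultipleSQLTest feeds two semicolon-terminated SELECTs through driver.executeSql(sql, 100) and prints every row of the returned JdbcSelectResult. The getDriver() helper it calls is defined earlier in HiveTest and is outside this diff; a rough sketch of what such a helper might look like follows, with every connection detail a placeholder assumption rather than a value from the commit (exact DriverConfig setters, and whether a connect() call is also needed, vary by dlink version):

import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;

public class HiveDriverSetupSketch {
    // Hypothetical stand-in for HiveTest.getDriver(); all values are placeholders.
    static Driver getDriver() {
        DriverConfig config = new DriverConfig();
        config.setName("hive-test");                            // assumed field name
        config.setType("Hive");                                 // must match the Hive driver's type key
        config.setUrl("jdbc:hive2://127.0.0.1:10000/default");  // placeholder HiveServer2 URL
        config.setUsername("hive");                             // placeholder
        config.setPassword("");                                 // placeholder
        return Driver.build(config);                            // some versions also require .connect()
    }
}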