Commit 23985083 authored by lcg

Phoenix metadata

parent 3cfcdbc1
......@@ -70,7 +70,7 @@ public class DorisTest {
JdbcSelectResult selectResult = driver.query("select * from scoreinfo ", 10);
List<LinkedHashMap<String, Object>> rowData = selectResult.getRowData();
for (LinkedHashMap<String, Object> rowDatum : rowData) {
System.out.println(rowData);
System.out.println(rowDatum);
}
}
}
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dlink-metadata</artifactId>
<groupId>com.dlink</groupId>
<version>0.6.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dlink-metadata-phoenix</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>com.dlink</groupId>
<artifactId>dlink-metadata-base</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>4.14.2-HBase-1.4</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>
\ No newline at end of file
package com.dlink.metadata.constant;
/**
*
* @author lcg
* @operate Phoenix常量
* @date 2022/2/16 14:19
* @return
*/
public interface PhoenixConstant {
/**
* 不指定schema列信息模板SQL
*/
String QUERY_COLUMNS_SQL_DEFAULT = " select COLUMN_NAME,COLUMN_FAMILY,DATA_TYPE,KEY_SEQ,NULLABLE, '' as CHARACTER_SET_NAME," +
" '' as COLLATION_NAME ,'' as ORDINAL_POSITION , 0 as NUMERIC_PRECISION, 0 as NUMERIC_SCALE, '' as AUTO_INCREMENT from SYSTEM.CATALOG where TABLE_NAME='%s' and COLUMN_NAME is not null ";
/**
* 查询默认指定列信息模板SQL
*/
String QUERY_COLUMNS_SQL = QUERY_COLUMNS_SQL_DEFAULT + " AND TABLE_SCHEM = '%s' ";
/**
* 查询schema模板SQL
*/
String QUERY_SCHEMA_SQL = " select distinct(TABLE_SCHEM) as TABLE_SCHEM from SYSTEM.CATALOG where TABLE_SCHEM is not null and TABLE_SCHEM <> 'SYSTEM' ";
/**
* 不指定schema查询table信息模板SQL
*/
String QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT = " select TABLE_NAME,TABLE_SCHEM,TABLE_TYPE,SCOPE_CATALOG as CATALOG,'' as ENGINE,'' as OPTIONS, 0 as ROWSNUM, null as CREATE_TIME, null as UPDATE_TIME from SYSTEM.CATALOG where TABLE_TYPE in ('u','v') ";
/**
* 根据schema查询table信息模板SQL
*/
String QUERY_TABLE_BY_SCHEMA_SQL = QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT + " AND TABLE_SCHEM = '%s' ";
/**
* Phoenix的driver
*/
String PHOENIX_DRIVER = "org.apache.phoenix.jdbc.PhoenixDriver";
}
package com.dlink.metadata.constant;
/**
*
* @author lcg
* @operate Phoenix常用数据类型及对应code
* @date 2022/2/16 16:49
* @return
*/
public enum PhoenixEnum {
INTEGER(4),
BIGINT(-5),
TINYINT(-6),
SMALLINT(5),
FLOAT(6),
DOUBLE(8),
DECIMAL(3),
BOOLEAN(16),
TIME(92),
DATE(91),
TIMESTAMP(93),
VARCHAR(12),
CHAR(1),
BINARY(-2),
VARBINARY(-3);
int dataTypeCode;
PhoenixEnum(int dataTypeCode) {
this.dataTypeCode = dataTypeCode;
}
public int getDataTypeCode(){
return dataTypeCode;
}
/**
* 获取数字 对应的数据类型 默认返回VARCHAR(无对应) , 传参为空时返回为null
* @param dataTypeCode
* @return
*/
public static PhoenixEnum getDataTypeEnum(Integer dataTypeCode){
if(dataTypeCode == null){
return null;
}else {
for (PhoenixEnum typeEnum : PhoenixEnum.values()) {
if(dataTypeCode.equals(typeEnum.dataTypeCode)){
return typeEnum;
}
}
}
return PhoenixEnum.VARCHAR;
}
}
package com.dlink.metadata.convert;
import com.dlink.metadata.constant.PhoenixEnum;
import com.dlink.metadata.rules.DbColumnType;
import com.dlink.metadata.rules.IColumnType;
public class PhoenixTypeConvert implements ITypeConvert {
@Override
public IColumnType convert(String columnType) {
String t = columnType.toLowerCase();
if (t.contains("char") || t.contains("varchar") || t.contains("text") ||
t.contains("nchar") || t.contains("nvarchar") || t.contains("ntext")
|| t.contains("uniqueidentifier") || t.contains("sql_variant")) {
return DbColumnType.STRING;
} else if (t.contains("int") || t.contains("tinyint") || t.contains("smallint") || t.contains("integer")) {
return DbColumnType.INTEGER;
} else if (t.contains("bigint")) {
return DbColumnType.LONG;
} else if (t.contains("float")) {
return DbColumnType.FLOAT;
} else if (t.contains("decimal") || t.contains("money") || t.contains("smallmoney")
|| t.contains("numeric")) {
return DbColumnType.BIG_DECIMAL;
} else if (t.contains("double")) {
return DbColumnType.DOUBLE;
} else if (t.contains("boolean")) {
return DbColumnType.BOOLEAN;
} else if (t.contains("time")) {
return DbColumnType.TIME;
} else if (t.contains("date")) {
return DbColumnType.DATE;
} else if (t.contains("smalldatetime") || t.contains("datetime")) {
return DbColumnType.TIMESTAMP;
} else if (t.contains("timestamp") || t.contains("binary") || t.contains("varbinary") || t.contains("image")) {
return DbColumnType.BYTE_ARRAY;
}
return DbColumnType.STRING;
}
@Override
public String convertToDB(String columnType) {
try {
Integer typeNum = Integer.valueOf(columnType);
return PhoenixEnum.getDataTypeEnum(typeNum).toString();
} catch (Exception e) {
switch (columnType.toUpperCase()) {
case "UNSIGNED_INT":
return "UNSIGNED_INT";
case "INT":
case "INTEGER":
return "INTEGER";
case "TINYINT":
return "TINYINT";
case "double":
return "double";
case "BIGINT":
return "BIGINT";
case "UNSIGNED_TINYINT":
return "UNSIGNED_TINYINT";
case "UNSIGNED_SMALLINT":
return "UNSIGNED_SMALLINT";
case "FLOAT":
return "FLOAT";
case "UNSIGNED_FLOAT":
return "UNSIGNED_FLOAT";
case "DOUBLE":
return "DOUBLE";
case "UNSIGNED_DOUBLE":
return "UNSIGNED_DOUBLE";
case "DECIMAL":
return "DECIMAL";
case "BOOLEAN":
return "BOOLEAN";
case "TIME ":
return "TIME ";
case "DATE":
return "DATE";
case "TIMESTAMP":
return "TIMESTAMP";
case "UNSIGNED_TIME":
return "UNSIGNED_TIME";
case "UNSIGNED_DATE":
return "UNSIGNED_DATE";
case "UNSIGNED_TIMESTAMP":
return "UNSIGNED_TIMESTAMP";
case "VARCHAR":
return "VARCHAR";
case "CHAR":
return "VARCHAR";
case "BINARY":
return "BINARY";
case "VARBINARY":
return "VARBINARY";
default:
return "VARBINARY";
}
}
}
}
package com.dlink.metadata.driver;
import com.dlink.metadata.constant.PhoenixConstant;
import com.dlink.metadata.convert.ITypeConvert;
import com.dlink.metadata.convert.PhoenixTypeConvert;
import com.dlink.metadata.query.IDBQuery;
import com.dlink.metadata.query.PhoenixQuery;
import com.dlink.model.Column;
import com.dlink.model.Table;
import java.util.List;
/**
*
* @author lcg
* @operate
* @date 2022/2/16 16:50
* @return
*/
public class PhoenixDriver extends AbstractJdbcDriver {
@Override
public IDBQuery getDBQuery() {
return new PhoenixQuery();
}
@Override
public ITypeConvert getTypeConvert() {
return new PhoenixTypeConvert();
}
@Override
String getDriverClass() {
return PhoenixConstant.PHOENIX_DRIVER;
}
@Override
public String getType() {
return "Phoenix";
}
@Override
public String getName() {
return "Phoenix";
}
@Override
public String getCreateTableSql(Table table) {
StringBuilder sql = new StringBuilder();
List<Column> columns = table.getColumns();
sql.append(" CREATE VIEW IF NOT EXISTS \"" + table.getName() + "\" ( ");
sql.append(" rowkey varchar primary key ");
PhoenixTypeConvert phoenixTypeConvert = new PhoenixTypeConvert();
if (columns != null) {
for (Column column : columns) {
sql.append(", \"" + column.getColumnFamily() + "\".\"" + column.getName() + "\" " + phoenixTypeConvert.convertToDB(column.getType()));
}
}
sql.append(" ) ");
return sql.toString();
}
}
package com.dlink.metadata.query;
import com.dlink.metadata.constant.PhoenixConstant;
/**
* @author lcg
* @operate
* @date 2022/2/16 14:39
* @return
*/
public class PhoenixQuery extends AbstractDBQuery {
@Override
public String schemaAllSql() {
return PhoenixConstant.QUERY_SCHEMA_SQL;
}
@Override
public String tablesSql(String schemaName) {
if (schemaName == null || schemaName.isEmpty()) {
return PhoenixConstant.QUERY_TABLE_BY_SCHEMA_SQL_DEFAULT;
}
return String.format(PhoenixConstant.QUERY_TABLE_BY_SCHEMA_SQL, schemaName);
}
@Override
public String columnsSql(String schemaName, String tableName) {
if (schemaName == null || schemaName.isEmpty()) {
return String.format(PhoenixConstant.QUERY_COLUMNS_SQL_DEFAULT, tableName);
}
return String.format(PhoenixConstant.QUERY_COLUMNS_SQL, tableName, schemaName);
}
@Override
public String schemaName() {
return "TABLE_SCHEM";
}
@Override
public String tableName() {
return "TABLE_NAME";
}
@Override
public String tableType() {
return "TABLE_TYPE";
}
@Override
public String tableComment() {
return "TABLE_NAME";
}
@Override
public String columnName() {
return "COLUMN_NAME";
}
@Override
public String columnType() {
return "DATA_TYPE";
}
@Override
public String columnComment() {
return "COLUMN_NAME";
}
@Override
public String columnKey() {
return "KEY_SEQ";
}
public String isNullable() {
return "NULLABLE";
}
@Override
public String rows() {
return "ROWSNUM";
}
}
package com.dlink.metadata;
import com.dlink.metadata.driver.Driver;
import com.dlink.metadata.driver.DriverConfig;
import com.dlink.metadata.result.JdbcSelectResult;
import com.dlink.model.Column;
import com.dlink.model.Schema;
import com.dlink.model.Table;
import org.junit.Before;
import org.junit.Test;
import java.util.LinkedHashMap;
import java.util.List;
public class PhoenixTest {
private Driver driver;
@Before
public void init() {
DriverConfig config = new DriverConfig();
config.setType("Phoenix");
config.setUrl("jdbc:phoenix:xxx");
try {
driver = Driver.build(config).connect();
} catch (Exception e) {
System.err.println("连接创建失败:" + e.getMessage());
}
}
@Test
public void testSchema() {
//schema && table
List<Schema> schemasAndTables = driver.getSchemasAndTables();
for (Schema schemasAndTable : schemasAndTables) {
List<Table> tables = schemasAndTable.getTables();
for (Table table : tables) {
System.out.println(table.getName() + " " + table.getSchema());
}
}
}
@Test
public void testListTables() {
List<Table> tables = driver.listTables("");
for (Table table : tables) {
System.out.println(table.getName() + " " + table.getSchema());
}
}
@Test
public void testColumns() {
// columns
List<Column> columns = driver.listColumns(null, "ODS_OUTP_PRESC");
for (Column column : columns) {
System.out.println(column.getName() + " " + column.getType() + " " + column.getComment());
}
}
@Test
public void query() {
JdbcSelectResult selectResult = driver.query("select * from ODS_OUTP_PRESC ", 10);
List<LinkedHashMap<String, Object>> rowData = selectResult.getRowData();
for (LinkedHashMap<String, Object> rowDatum : rowData) {
System.out.println(rowDatum);
}
}
}
......@@ -72,7 +72,7 @@ public class SqlServerTest {
JdbcSelectResult selectResult = driver.query("select * from t_user", 10);
List<LinkedHashMap<String, Object>> rowData = selectResult.getRowData();
for (LinkedHashMap<String, Object> rowDatum : rowData) {
System.out.println(rowData);
System.out.println(rowDatum);
}
}
......
......@@ -19,6 +19,7 @@
<module>dlink-metadata-postgresql</module>
<module>dlink-metadata-sqlserver</module>
<module>dlink-metadata-doris</module>
<module>dlink-metadata-phoenix</module>
</modules>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment