/*
 * Java alibaba-Druid 实现解析 SQL 中数据库 Table, 数据库 Database
 * (Parsing table and database names out of SQL using Alibaba Druid.)
 *
 * 版权声明: 学习交流为主, 未经博主同意禁止转载, 禁止用于商用。
 * Source: https://blog.csdn.net/u012965373/article/details/82260034
 */
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.dialect.hive.parser.HiveStatementParser;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveSchemaStatVisitor;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.stat.TableStat;
import model.ParserResult;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * @author yangxin_ryan
 * 阿里巴巴 Druid 开源组件,实现解析Hive,XXXXDB等(具体参考github文档)数据库SQL
 * dbTableFormat Function -> 判断输入的SQL是否是 DataBase.Table
 * sqlParser Function -> 解析输入的SQL,解析出对应的TableName 以及DataBase
 */
/**
 * Utility for extracting table names, database names and the operations
 * performed on them from Hive SQL, using the Alibaba Druid SQL parser.
 *
 * <p>{@link #dbTableFormat(String)} checks that every table reference in the
 * SQL is qualified as {@code Database.Table}; {@link #sqlParser(String)}
 * parses the SQL into a list of {@code ParserResult} (database, table,
 * operation) entries.
 */
public class DruidUtil {

    // final: the logger is created once and never reassigned.
    private static final Log LOG = LogFactory.getLog(DruidUtil.class);

    // Shared user-facing message for a missing "Database.Table" qualification.
    // Text kept byte-identical to the original (callers/users may match on it).
    private static final String BAD_FORMAT_MESSAGE =
            "抱歉,查询的SQL语句中数据库名与表名以下格式 :\n Database.Table 存在\n ,请修改后再进行查询!";

    /**
     * Parses {@code sql} with the Hive dialect parser and collects the table
     * usage statistics gathered by the schema visitor.
     *
     * <p>NOTE(review): only the first statement is parsed
     * ({@code parseStatement()}), matching the original behavior — multi-statement
     * input is not fully analyzed.
     *
     * @param sql the SQL text to parse
     * @return map from table name (possibly qualified as {@code db.table}) to
     *         the statistics of how that table is used
     */
    private static Map<TableStat.Name, TableStat> collectTableStats(String sql) {
        SQLStatementParser parser = new HiveStatementParser(sql);
        SQLStatement statement = parser.parseStatement();
        HiveSchemaStatVisitor visitor = new HiveSchemaStatVisitor();
        statement.accept(visitor);
        return visitor.getTables();
    }

    /**
     * Checks that every table referenced in {@code sql} is written in the
     * {@code Database.Table} form.
     *
     * @param sql the query SQL to validate
     * @return an empty string when all table references are qualified;
     *         otherwise a (Chinese) error message asking the user to qualify
     *         table names. An SQL with no table references is also rejected.
     */
    public static String dbTableFormat(String sql) {
        Map<TableStat.Name, TableStat> tableOpt = collectTableStats(sql);
        LOG.info(tableOpt);
        if (tableOpt.isEmpty()) {
            return BAD_FORMAT_MESSAGE;
        }
        for (TableStat.Name key : tableOpt.keySet()) {
            // A name without a dot is an unqualified table reference.
            if (!key.toString().contains(".")) {
                return BAD_FORMAT_MESSAGE;
            }
        }
        return "";
    }

    /**
     * Parses {@code sql} and reports, for each referenced table, its database
     * name (empty when unqualified), table name and the operation performed on
     * it (e.g. {@code Select}).
     *
     * @param sql the SQL to analyze
     * @return one {@code ParserResult} per referenced table; empty list when
     *         the SQL references no tables
     */
    public static List<ParserResult> sqlParser(String sql) {
        List<ParserResult> parserResultList = new ArrayList<>();
        for (Map.Entry<TableStat.Name, TableStat> entry : collectTableStats(sql).entrySet()) {
            String name = entry.getKey().toString();
            ParserResult parserResult = new ParserResult();
            if (name.contains(".")) {
                LOG.info("Table And DB");
                // Split on the FIRST dot only: the original split("\\.")[1]
                // truncated identifiers with more than one dot ("a.b.c" lost ".c").
                String[] parts = name.split("\\.", 2);
                parserResult.setDbName(parts[0]);
                parserResult.setTableName(parts[1]);
                // The map value IS the per-table stat; no need to re-look it up
                // via visitor.getTableStat().
                parserResult.setOperation(entry.getValue().toString());
            } else {
                LOG.info("Only Table");
                parserResult.setTableName(name);
                parserResult.setDbName("");
                parserResult.setOperation("");
                LOG.info(name);
            }
            parserResultList.add(parserResult);
        }
        return parserResultList;
    }


    public static void main(String[] args) {
        String sql = "select * from dw.table";
        DruidUtil.sqlParser(sql);
    }
}

/*
 * 猜你喜欢
 * 转载自 blog.csdn.net/u012965373/article/details/82260034
 */