Flink 1.11 reading Hive: the various dependency problems I hit

My skills are too weak; every step forward hits a new snag. Rough going.

First, the base dependencies to import:

The clean version:

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
            <!--<scope>provided</scope>-->
        </dependency>


        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
            <!--<scope>provided</scope>-->
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-scala-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>


        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.7.3</version>
            <scope>provided</scope>
        </dependency>


        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-hive -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <scope>provided</scope>
        </dependency>


        <!-- https://mvnrepository.com/artifact/org.apache.hive/hive-exec -->
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-exec</artifactId>
            <version>2.1.1</version>
        </dependency>
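
The pom snippets in this post reference ${flink.version} and ${scala.binary.version} without showing where they are defined. A minimal <properties> block, assuming Flink 1.11.0 built against Scala 2.11 (the versions this post targets), looks like this:

<properties>
    <!-- assumed values: Flink 1.11.0 on Scala 2.11 -->
    <flink.version>1.11.0</flink.version>
    <scala.binary.version>2.11</scala.binary.version>
</properties>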

The code to run:

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.flink.types.Row;

// Wrapper class (name arbitrary) so the snippet compiles as-is
public class FlinkReadHiveDemo {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Build EnvironmentSettings and pick the Blink planner
        EnvironmentSettings bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();

        // Build the StreamTableEnvironment
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env, bsSettings);

        String name            = "myhive";
        String defaultDatabase = "default";
        String hiveConfDir     = "G:\\Flink SQL开发文件"; // local dir containing hive-site.xml
        String version         = "1.1.0";               // Hive version

        HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);

        // Register the Hive catalog, switch to it, and use the Hive SQL dialect
        tEnv.registerCatalog("myhive", hive);
        tEnv.useCatalog("myhive");
        tEnv.getConfig().setSqlDialect(SqlDialect.HIVE);

        String querySql = "SELECT * FROM sample_07";

        Table table = tEnv.sqlQuery(querySql);

//        table.printSchema();
        // Convert to a retract stream of (add/retract flag, row) pairs and print it
        DataStream<Tuple2<Boolean, Row>> demo = tEnv.toRetractStream(table, Row.class);
        demo.print("$$$$$$$$$$$:");
        try {
            env.execute();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
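
One note on the output: toRetractStream wraps every row in a Tuple2 whose Boolean flag marks an insert (true) or a retraction (false); for a plain SELECT with no aggregation you should only ever see true records.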

I don't know whether you, the person reading this, will run into conflict problems, but I did.

I had already added a pile of dependencies when I added the Hive ones, and running the job straight away failed with class-not-found errors. Maddening.

After some adjustment, here is the pom that worked, with part of the Hadoop dependencies added:

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
    <scope>provided</scope>
</dependency>


<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0-cdh5.16.1</version>
    <scope>provided</scope>
</dependency>


<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-exec</artifactId>
    <version>1.1.0</version>
    <exclusions>
        <exclusion>
            <groupId>org.pentaho</groupId>
            <artifactId>pentaho-aggdesigner-algorithm</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-vector-code-gen</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-llap-tez</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-shims</artifactId>
        </exclusion>
        <exclusion>
            <groupId>commons-codec</groupId>
            <artifactId>commons-codec</artifactId>
        </exclusion>
        <exclusion>
            <groupId>commons-httpclient</groupId>
            <artifactId>commons-httpclient</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.logging.log4j</groupId>
            <artifactId>log4j-slf4j-impl</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.antlr</groupId>
            <artifactId>antlr-runtime</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.antlr</groupId>
            <artifactId>ST4</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.ant</groupId>
            <artifactId>ant</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-compress</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.ivy</groupId>
            <artifactId>ivy</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.zookeeper</groupId>
            <artifactId>zookeeper</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.curator</groupId>
            <artifactId>apache-curator</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.curator</groupId>
            <artifactId>curator-framework</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.codehaus.groovy</groupId>
            <artifactId>groovy-all</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.calcite</groupId>
            <artifactId>calcite-core</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.calcite</groupId>
            <artifactId>calcite-druid</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.calcite.avatica</groupId>
            <artifactId>avatica</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.apache.calcite</groupId>
            <artifactId>calcite-avatica</artifactId>
        </exclusion>
        <exclusion>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
        </exclusion>
        <exclusion>
            <groupId>stax</groupId>
            <artifactId>stax-api</artifactId>
        </exclusion>
        <exclusion>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
        </exclusion>
    </exclusions>
    <scope>provided</scope>
</dependency>

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-json</artifactId>
    <version>${flink.version}</version>
    <!--<scope>provided</scope>-->
</dependency>


<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-core</artifactId>
    <version>${flink.version}</version>
    <!--<scope>provided</scope>-->
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-api-scala-bridge_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-common</artifactId>
    <version>${flink.version}</version>
    <scope>provided</scope>
</dependency>


<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>

<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-clients_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
</dependency>


<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
    <version>${flink.version}</version>
    <scope>provided</scope>
</dependency>
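
A note on the long exclusion list under hive-exec: those transitive dependencies (Calcite, Guava, ZooKeeper, the log4j-slf4j binding, and so on) come in versions that clash with what Flink and the Hadoop client already pull in, at least in my setup; if you still see a NoSuchMethodError or a duplicate-class conflict at runtime, one of them is the usual suspect.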

Finally, if it still errors out, manually add this dependency jar:

flink-shaded-hadoop-2-uber-2.6.5-10.0.jar
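
If you would rather pull it in through Maven than copy the jar by hand, the same pre-bundled Hadoop uber jar is published under the coordinates below; scope it provided, since on a real cluster it normally lives in Flink's lib/ directory rather than inside your application jar:

<!-- pre-bundled Hadoop, matching flink-shaded-hadoop-2-uber-2.6.5-10.0.jar above -->
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-shaded-hadoop-2-uber</artifactId>
    <version>2.6.5-10.0</version>
    <scope>provided</scope>
</dependency>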

Quick links to the official docs:

https://ci.apache.org/projects/flink/flink-docs-release-1.11/zh/dev/table/hive/

https://ci.apache.org/projects/flink/flink-docs-release-1.11/zh/ops/deployment/hadoop.html#providing-hadoop-classes

And finally, one more worked example from aboutyun:

https://www.aboutyun.com/forum.php?mod=viewthread&tid=29105&s_info=YTozOntzOjM6InVpZCI7czoxOiIxIjtzOjM6InRpZCI7aToyOTEwNTtzOjk6InRpbWVzdGFtcCI7aToxNTk0NjEwMjI4O30=&s_md5check=1d0ab980 

Reposted from blog.csdn.net/qq_31866793/article/details/107321041