Flink 1.10.0 with Hive 1.2.1
    Reference blog: Flink's support for Hive

    The pom.xml below pulls in the Flink, Hadoop, and Hive dependencies needed for Flink to read Hive:

    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
        <groupId>flink_example</groupId>
        <artifactId>flink</artifactId>
        <version>1.0-SNAPSHOT</version>

        <properties>
            <flink.version>1.10.0</flink.version>
            <scala.binary.version>2.11</scala.binary.version>
            <kafka.version>2.2.0</kafka.version>
            <hive.version>1.2.1</hive.version>
            <hadoop.version>2.8.5</hadoop.version>
        </properties>

        <dependencies>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-hadoop-fs</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>com.jolbox</groupId>
                <artifactId>bonecp</artifactId>
                <version>0.8.0.RELEASE</version>
            </dependency>
            <dependency>
                <groupId>com.twitter</groupId>
                <artifactId>parquet-hive-bundle</artifactId>
                <version>1.6.0</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-scala_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-common</artifactId>
                <version>${hadoop.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>${hadoop.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-client</artifactId>
                <version>${hadoop.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-core</artifactId>
                <version>${hadoop.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-jdbc</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-exec</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-metastore</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-cli</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-common</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-service</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-shims</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive.hcatalog</groupId>
                <artifactId>hive-hcatalog-core</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive.hcatalog</groupId>
                <artifactId>hive-hcatalog</artifactId>
                <version>${hive.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.thrift</groupId>
                <artifactId>libfb303</artifactId>
                <version>0.9.3</version>
                <type>pom</type>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-hadoop-compatibility_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-shaded-hadoop-2-uber</artifactId>
                <version>2.7.5-8.0</version>
            </dependency>
            <!-- Flink Table / Hive connector dependencies -->
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-table-api-scala_${scala.binary.version}</artifactId>
                <version>${flink.version}</version>
            </dependency>
            <dependency>
                <groupId>org.apache.flink</groupId>
                <artifactId>flink-table-common</artifactId>
                <version>${flink.version}</version>
            </dependency>
        </dependencies>

        <build>
            <plugins>
                <!-- Compiles Scala sources into class files -->
                <plugin>
                    <groupId>net.alchim31.maven</groupId>
                    <artifactId>scala-maven-plugin</artifactId>
                    <version>3.4.6</version>
                    <executions>
                        <execution>
                            <!-- Bind to Maven's compile and test-compile phases -->
                            <goals>
                                <goal>compile</goal>
                                <goal>testCompile</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <!-- Builds a fat jar with all dependencies at the package phase -->
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-assembly-plugin</artifactId>
                    <version>3.0.0</version>
                    <configuration>
                        <descriptorRefs>
                            <descriptorRef>jar-with-dependencies</descriptorRef>
                        </descriptorRefs>
                    </configuration>
                    <executions>
                        <execution>
                            <id>make-assembly</id>
                            <phase>package</phase>
                            <goals>
                                <goal>single</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <!-- Copies dependency jars (versions stripped) into ./lib -->
                <plugin>
                    <artifactId>maven-dependency-plugin</artifactId>
                    <configuration>
                        <excludeTransitive>false</excludeTransitive>
                        <stripVersion>true</stripVersion>
                        <outputDirectory>./lib</outputDirectory>
                    </configuration>
                </plugin>
            </plugins>
        </build>
    </project>
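
    With this pom, mvn package produces the fat jar via the assembly plugin, and mvn dependency:copy-dependencies copies the (version-stripped) dependency jars into ./lib.

    The HiveCatalog in the code below reads its connection settings from a hive-site.xml inside hiveConfDir. A minimal sketch of that file, assuming a remote metastore; the thrift URI is a placeholder for your own metastore host:

    <?xml version="1.0" encoding="UTF-8"?>
    <configuration>
        <!-- Placeholder: point this at your own Hive metastore service -->
        <property>
            <name>hive.metastore.uris</name>
            <value>thrift://localhost:9083</value>
        </property>
    </configuration>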

Code 01: register a HiveCatalog and list its databases:

    package org.caip;

    import java.util.Arrays;

    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.catalog.hive.HiveCatalog;

    public class ReadHive {
        public static void main(String[] args) {
            // Blink planner in batch mode; a TableEnvironment created this way
            // needs no StreamExecutionEnvironment.
            EnvironmentSettings settings = EnvironmentSettings.newInstance()
                    .useBlinkPlanner()
                    .inBatchMode()
                    .build();
            TableEnvironment tableEnv = TableEnvironment.create(settings);

            String name = "myhive";
            String defaultDatabase = "caip";
            String hiveConfDir = "/Users/gaozhen/IdeaProjects/flink/src/main/resources"; // a local path containing hive-site.xml
            String version = "1.2.1";

            // Register the HiveCatalog and set it as the current catalog of the session
            HiveCatalog hive = new HiveCatalog(name, defaultDatabase, hiveConfDir, version);
            tableEnv.registerCatalog("myhive", hive);
            tableEnv.useCatalog("myhive");

            // listDatabases() returns a String[]; print its contents, not the array reference
            String[] databases = tableEnv.listDatabases();
            System.out.println(Arrays.toString(databases));
        }
    }
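
    With the catalog set, Hive tables are queryable with plain SQL. A minimal sketch of what could follow at the end of main() above; the table name t1 is a placeholder for a table that actually exists in the caip database, and it needs one extra import, org.apache.flink.table.api.Table:

    // Placeholder: replace t1 with a real table in the current database
    Table result = tableEnv.sqlQuery("SELECT * FROM t1");
    result.printSchema();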