blog

Flink 1.11 ハイブテーブルAPI

依存関係を追加し、hive-site.xmlファイルをプロジェクトのlibディレクトリに置いて実行する Demo...

Jan 16, 2021 · 4 min. read
シェア

依存関係

<!-- Build properties: Flink 1.11 on Scala 2.11 / Java 8, against the CDH 6.2.1 Hive/Hadoop stack. -->
<properties>
 <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 <!-- Must match the Flink version deployed on the cluster. -->
 <flink.version>1.11.0</flink.version>
 <java.version>1.8</java.version>
 <!-- Scala binary version suffix used by the _${scala.binary.version} artifacts below. -->
 <scala.binary.version>2.11</scala.binary.version>
 <maven.compiler.source>${java.version}</maven.compiler.source>
 <maven.compiler.target>${java.version}</maven.compiler.target>
 <log4j.version>2.12.1</log4j.version>
 <!-- CDH-packaged versions; these artifacts are only available from the Cloudera repository. -->
 <hive.version>2.1.1-cdh6.2.1</hive.version>
 <hadoop.version>3.0.0-cdh6.2.1</hadoop.version>
</properties>
<repositories>
 <repository>
 	<id>apache.snapshots</id>
 	<name>Apache Development Snapshot Repository</name>
 	<!-- FIX: original URL was garbled to "https://..org/..."; restored canonical host. -->
 	<url>https://repository.apache.org/content/repositories/snapshots/</url>
 	<releases>
 		<enabled>false</enabled>
 	</releases>
 	<snapshots>
 		<enabled>true</enabled>
 	</snapshots>
 </repository>
 <!-- CDHリポジトリの導入 (Cloudera repo; required for the *-cdh6.2.1 hive/hadoop artifacts).
      FIX: original comment was unclosed (missing "-->"), which made the XML malformed
      and effectively commented out this repository. -->
 <repository>
 	<id>cloudera</id>
 	<!-- FIX: original URL was garbled to "https://..com/..."; restored canonical host. -->
 	<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
 </repository>
</repositories>
<dependencies>
 <!-- Flink Table API dependencies. All marked "provided": the Flink distribution on the
      cluster already ships these jars, so they must not be bundled into the job jar. -->
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<!-- Blink planner — the planner used for batch Table API jobs in Flink 1.11. -->
 	<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<!-- Provides HiveCatalog and the Hive connector used in the demo below. -->
 	<artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-table-common</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 
 <!-- Hive/Hadoop dependencies (CDH builds; resolved from the Cloudera repository). -->
 <dependency>
 	<groupId>org.apache.hive</groupId>
 	<artifactId>hive-exec</artifactId>
 	<version>${hive.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.hadoop</groupId>
 	<artifactId>hadoop-client</artifactId>
 	<version>${hadoop.version}</version>
 	<scope>provided</scope>
 </dependency>
 <!-- Apache Flink core dependencies -->
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-java</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-clients_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.flink</groupId>
 	<artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
 	<version>${flink.version}</version>
 	<scope>provided</scope>
 </dependency>
 <!-- Logging framework (log4j2 behind the slf4j facade); runtime scope only. -->
 <dependency>
 	<groupId>org.apache.logging.log4j</groupId>
 	<artifactId>log4j-slf4j-impl</artifactId>
 	<version>${log4j.version}</version>
 	<scope>runtime</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.logging.log4j</groupId>
 	<artifactId>log4j-api</artifactId>
 	<version>${log4j.version}</version>
 	<scope>runtime</scope>
 </dependency>
 <dependency>
 	<groupId>org.apache.logging.log4j</groupId>
 	<artifactId>log4j-core</artifactId>
 	<version>${log4j.version}</version>
 	<scope>runtime</scope>
 </dependency>
</dependencies>

hive-site.xml

プロジェクトの lib ディレクトリに hive-site.xml ファイルを置きます。

Demo

public class HiveTest {
 public static void main(String[] args) throws Exception{
 EnvironmentSettings bbSettings = EnvironmentSettings.newInstance().inBatchMode().build();
 TableEnvironment tableEnv = TableEnvironment.create(bbSettings);
 String catalogName = "myhive";
 String defaultDatabase = "hive_data";
 String hiveConfDir = "lib"; // a local path
 String version = "2.1.1-cdh6.2.1";
 HiveCatalog hiveCatalog = new HiveCatalog(catalogName, defaultDatabase, hiveConfDir, version);
 tableEnv.registerCatalog(catalogName, hiveCatalog);
 // set the HiveCatalog as the current catalog of the session
 tableEnv.useCatalog(catalogName);
 // to use hive dialect
 tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
 tableEnv.executeSql("create table test_buck(id int, name string)
" +
 "row format delimited fields terminated by '\\s+'");
 tableEnv.executeSql("insert into table test_buck values (1,'') ");
 
 // TableResult tableResult = tableEnv.executeSql("select * from test_buck");
 // tableResult.print();
 }
}
Read next

幸福感を高めるいくつかの方法

最近、233ソースは実は忙しい。3ヶ月以上続いたプロジェクトの開発・再構築で、毎日一緒に残業したことのないパートナーに引っ張られて1095~10105くらいの残業リズム。彼は最近、基本的に私たちの会社で最も頻繁に残業で2年以上であると述べました。 それは最終的に今週オンラインになる予定でしたが、問題のnetty非同期コールバック不適切な使用のため、それは別の2日間かかった...

Jan 15, 2021 · 1 min read