import org.apache.spark.sql.SparkSession;

public class Application {

    private static String appName = "spark.demo";
    private static String master = "local[*]";

    public static void main(String[] args) {
        // Hive support is enabled, so Spark reads the metastore address from the
        // hive-site.xml found on the classpath.
        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark Hive Example")
                .master("local[*]")
                // .config("spark.sql.warehouse.dir", "hdfs://172.29.108.184:8020/hive")
                .enableHiveSupport()
                .getOrCreate();

        // Sanity checks against the remote Hive metastore.
        spark.sql("show databases").show();
        spark.sql("show tables").show();
        spark.sql("desc tmp.parse_log").show();

        // Earlier variant based on the legacy JavaSparkContext/SQLContext API,
        // kept commented out for reference:
        //
        // JavaSparkContext sc = null;
        // try {
        //     // Initialize the JavaSparkContext
        //     SparkConf conf = new SparkConf()
        //             .setAppName(appName)
        //             .setMaster(master)
        //             .set("hive.metastore.uris", "thrift://172.29.108.183:9083")
        //             .set("spark.sql.sources.partitionOverwriteMode", "dynamic");
        //
        //     sc = new JavaSparkContext(conf);
        //
        //     List<SqlJob.LogStore> list = SqlJob.getList();
        //
        //     SQLContext sqlContext = new SQLContext(sc);
        //     // Register the list as a temporary table
        //     Dataset<org.apache.spark.sql.Row> dataFrame = sqlContext.createDataFrame(list, SqlJob.LogStore.class);
        //
        //     sqlContext.table("parse_log").show();
        //     dataFrame.createOrReplaceTempView("tmp_table");
        //     System.out.println("Temp view registered OK");
        //     // Write into the Hive table
        //     sqlContext.sql("INSERT OVERWRITE TABLE tmp.parse_log PARTITION (day='20200331') SELECT tableNames, sql FROM tmp_table");
        //     // Alternative: dataFrame.write().saveAsTable("tmp.parse_log");
        //     System.out.println("Write to Hive table OK");
        //     sqlContext.sql("show tables").show();
        // } catch (Exception e) {
        //     e.printStackTrace();
        // } finally {
        //     if (sc != null) {
        //         sc.close();
        //     }
        // }
    }
}
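
The commented-out block above writes a list of LogStore beans into the partitioned table tmp.parse_log through the legacy SQLContext. Below is a minimal, self-contained sketch of the same write path using the SparkSession API instead; the LogStore bean, the sample row, and the assumption that tmp.parse_log already exists in Hive are carried over from that block for illustration only.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;

public class WriteToHiveSketch {

    // Hypothetical bean with the two columns used by the INSERT statement.
    public static class LogStore implements Serializable {
        private String tableNames;
        private String sql;

        public LogStore() {}
        public LogStore(String tableNames, String sql) {
            this.tableNames = tableNames;
            this.sql = sql;
        }
        public String getTableNames() { return tableNames; }
        public void setTableNames(String tableNames) { this.tableNames = tableNames; }
        public String getSql() { return sql; }
        public void setSql(String sql) { this.sql = sql; }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("spark.demo")
                .master("local[*]")
                // Carried over from the commented-out configuration above.
                .config("spark.sql.sources.partitionOverwriteMode", "dynamic")
                .enableHiveSupport()
                .getOrCreate();

        // Build a DataFrame from an in-memory list and expose it as a temp view.
        List<LogStore> rows = Arrays.asList(new LogStore("t1,t2", "select 1"));
        Dataset<Row> df = spark.createDataFrame(rows, LogStore.class);
        df.createOrReplaceTempView("tmp_table");

        // Overwrite one partition of the target Hive table, as in the original INSERT.
        spark.sql("INSERT OVERWRITE TABLE tmp.parse_log PARTITION (day='20200331') "
                + "SELECT tableNames, sql FROM tmp_table");

        spark.stop();
    }
}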
Maven dependencies (pom.xml):

...
<spark.version>2.4.0</spark.version>

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>${spark.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>${spark.version}</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-hive_2.11</artifactId>
    <version>${spark.version}</version>
</dependency>
...

Configure hive-site.xml for the remote Hive:
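A minimal sketch of what such a hive-site.xml typically contains; the metastore URI and the HDFS warehouse path are taken from the settings that appear in the commented-out code above, so adjust them to your environment. The file is usually placed on the application's classpath (for example under src/main/resources) so that enableHiveSupport() picks it up.

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Address of the remote Hive metastore service (from the code above). -->
    <property>
        <name>hive.metastore.uris</name>
        <value>thrift://172.29.108.183:9083</value>
    </property>
    <!-- Warehouse location on HDFS (from the commented-out config above). -->
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>hdfs://172.29.108.184:8020/hive</value>
    </property>
</configuration>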