1. var sql = "select t1.registno,t1.accidentno,t1.policyno,t2.casetype as caseflag from ods_new.ods_car_lregist t1 left join ods_new.ods_car_lclaim t2 on t1.accidentno = t2.accidentno and t1.policyno = t2.policyno"
    2. var claimDf = spark.sql(sql)
    3. saveDf(claimDf, "tmp_claim")
    4. // 理赔 join 报案
    5. sql = "select t1.*,t2.frameno,t2.licenseno,t2.reportormobile as reportorMobilenumber,t2.comcode,t2.accidentno,t2.policyno,t2.damagetypedesc as damagetypename,t2.otherareaflag as isLocal, t2.firstsiteflag as isFirstScene from tmp_claim t1 left join ods_new.ods_car_lregist t2 on t1.accidentno = t2.accidentno and t1.registno = t2.registno";
    6. claimDf = makeJsonDf(sql, "incident")
    7. saveDf(claimDf, "tmp_claim")
    8. claimDf.show(10, false)
    9. // 查勘 join 车辆信息
    10. sql = "select t1.accidentno,t1.accidenttype,t1.damagetypecode,t1.surveyorname,t2.licenseno,t2.engineno,t2.drivername,t2.drivinglicenseno,t2.insuredcarflag from ods_new.ODS_CAR_LSURVEY t1 left join ods_new.ODS_CAR_LSURVEY_CAR t2 on t1.accidentno = t2.accidentno"
    11. val investigationDf = makeJsonDf(sql, "vehicleLossList")
    12. saveDf(investigationDf, "tmp_investigation")
    13. // 理赔 join 查勘
    14. sql = "select t1.*,t2.accidentno,t2.accidenttype,t2.damagetypecode,t2.surveyorname as checkerName,t2.vehicleLossList from tmp_claim t1 left join tmp_investigation t2 on t1.accidentno = t2.accidentno"
    15. claimDf = makeJsonDf(sql, "investigation")
    16. saveDf(claimDf, "tmp_claim")
    17. claimDf.show(10, false)
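
The helpers saveDf and makeJsonDf are used throughout but never shown. The following is only a minimal sketch of what they plausibly do, assuming saveDf merely registers a temp view and makeJsonDf runs the SQL and folds a set of columns into one JSON string column; the column list is passed explicitly here, whereas the original two-argument version presumably derives it on its own.

import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, struct, to_json}

// Assumed behaviour: expose the DataFrame to later SQL under a temp view name.
def saveDf(df: DataFrame, name: String): Unit =
  df.createOrReplaceTempView(name)

// Assumed behaviour: run the SQL, pack the given columns into one JSON string
// column, and drop the originals. The real helper takes only (sql, jsonCol).
def makeJsonDf(sqlText: String, jsonCol: String, packCols: Seq[String]): DataFrame = {
  val df = spark.sql(sqlText)
  df.withColumn(jsonCol, to_json(struct(packCols.map(col): _*)))
    .drop(packCols: _*)
}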

Be careful about reusing the same table name for intermediate results (tmp_claim is overwritten at every step above): if the job is ever recomputed, later steps can resolve against the replaced view and the result comes back empty.
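
One way to sidestep this, sketched below with toy data (the view and column names are illustrative, not from the pipeline above): give each intermediate result its own view name, and cache() it if you want to pin the computed rows before the next step is registered.

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("tmp-view-demo").getOrCreate()
import spark.implicits._

// Step 1 gets its own view name instead of reusing a single "tmp_claim".
Seq(("A001", "P001"), ("A002", "P002"))
  .toDF("accidentno", "policyno")
  .createOrReplaceTempView("tmp_claim_step1")

// Step 2 reads step 1 and is registered under a new name, so no view is ever
// redefined in terms of itself; cache() additionally pins the computed rows.
val enriched = spark.sql("select *, 'settled' as status from tmp_claim_step1").cache()
enriched.createOrReplaceTempView("tmp_claim_step2")

spark.sql("select * from tmp_claim_step2").show(false)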