- 在 HDFS 上保存一个文件而非目录（save a single file, not a directory, on HDFS）

```scala
package cn.spark
import java.io._
import java.net.URI
import java.time.format.DateTimeFormatter
import java.time.{LocalDateTime, ZoneId, ZonedDateTime}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, current_timestamp, date_format, from_unixtime, length, lit}
import org.elasticsearch.spark._
import org.elasticsearch.spark.sql._
/**
 * Writes the literal string "false" to a single flag file on HDFS
 * (`push.flag`). Demonstrates saving one plain file — not a Spark
 * output directory — via the Hadoop FileSystem API.
 */
object json2es {
  // @transient + lazy keeps the (non-serializable) log4j logger out of
  // Spark task serialization; it is re-created on each JVM that needs it.
  @transient lazy val logger = Logger.getLogger(getClass.getName)

  def main(args: Array[String]) {
    // Spark/Hive session is created for parity with the surrounding job;
    // only the Hadoop Configuration is actually needed to touch HDFS.
    val spark = SparkSession.builder()
      .appName("Spark copy json samples to elasticsearch")
      .enableHiveSupport()
      .getOrCreate()
    val sc = SparkContext.getOrCreate()

    val hdfsconf = new Configuration()
    val fs = FileSystem.get(hdfsconf)
    val pushflagfile = "hdfs:///user/hive/warehouse/svReview/push.flag"

    // FileSystem.create overwrites an existing file by default, so the
    // flag is reset on every run.
    val output = fs.create(new Path(pushflagfile))
    val os = new BufferedOutputStream(output)
    try {
      os.write("false".getBytes("UTF-8"))
    } finally {
      // Closing the buffered wrapper flushes and closes the underlying
      // HDFS output stream as well; finally guarantees no handle leak
      // even if the write throws.
      os.close()
    }
  }
}
```