SparkStreaming讀取外部文件

scala語言:

方法1

// Read the "opinionDictPath" property from an external config file via HDFSUtil.
// NOTE(review): `opinionDictPath` is assumed to be a var declared elsewhere in
// the driver — confirm against the surrounding code.
val path = "/data1/work/config.properties"
try {
  opinionDictPath = HDFSUtil.getProperties(path, "opinionDictPath")
  println("opinionDictPath 2 is " + opinionDictPath)
} catch {
  // Catch only non-fatal throwables so OutOfMemoryError / InterruptedException
  // and friends still propagate instead of being swallowed here.
  case scala.util.control.NonFatal(exp) =>
    println("Exception  " + exp.getMessage)
}

其中 path 爲本地文件系統上的文件路徑。

import java.io.IOException
import java.net.URI
import java.util.Properties

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FSDataInputStream, FileSystem, Path}

/** Helper for loading a java.util.Properties file from a Hadoop-compatible
  * filesystem (HDFS or local, decided by the URI scheme of the path).
  *
  * NOTE(review): the shared mutable fields (`fs`, `hdfsInStream`, `prop`) are
  * kept only for backward compatibility with existing callers; this object is
  * not thread-safe.
  */
object HDFSUtil {
  val conf: Configuration = new Configuration
  var fs: FileSystem = null
  var hdfsInStream: FSDataInputStream = null
  val prop = new Properties()

  /** Opens an input stream for `path`.
    *
    * Returns null when the stream cannot be opened — callers must check.
    * (The original returned whatever stale stream a previous call had left in
    * `hdfsInStream`, which could silently read the wrong file.)
    */
  def getFSDataInputStream(path: String): FSDataInputStream = {
    try {
      fs = FileSystem.get(URI.create(path), conf)
      hdfsInStream = fs.open(new Path(path))
      hdfsInStream
    } catch {
      case e: IOException =>
        e.printStackTrace()
        print("IOException " + e.getMessage)
        null // explicit failure marker instead of a stale stream
    }
  }

  /** Loads the properties file at `path` and returns the value for `key`
    * (null when the file could not be opened or the key is absent).
    * The input stream is always closed after loading — the original leaked it.
    */
  def getProperties(path: String, key: String): String = {
    val in = getFSDataInputStream(path)
    if (in == null) {
      null
    } else {
      try prop.load(in)
      finally in.close()
      prop.getProperty(key)
    }
  }

}

方法2

// Method 2: read the properties file directly from the local filesystem.
// Relies on `path` being defined earlier in the driver.
try {
  import java.io.FileInputStream
  val props = new Properties()
  // Close the stream explicitly — the original leaked the FileInputStream.
  val in = new FileInputStream(path)
  try props.load(in)
  finally in.close()
  val opinionPath = props.getProperty("opinionDictPath")
  println("test opinionPath  " + opinionPath)
} catch {
  case exp: Exception =>
    println("Exception is : " + exp.getMessage)
}

提交作業時,通過 spark-submit 的 --files 選項將配置文件分發到各個 executor 的工作目錄:

--files /data1/work/config.properties

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章