Commit a2d075eb authored by 박준형's avatar 박준형

Upload New File

parent 44fb85d7
package rtu
import org.apache.spark.sql.{DataFrame, Encoder}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import java.util.Properties
import java.io.{FileInputStream, InputStreamReader}
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
import scala.collection.mutable
object RTUStatistics {

  /** Loads a Java `.properties` file (decoded as UTF-8) and returns it as a
    * mutable Scala map view.
    *
    * @param file path to the properties file
    * @return key/value pairs from the file
    */
  def readProperties(file: String): mutable.Map[String, String] = {
    val properties = new Properties
    val reader = new InputStreamReader(new FileInputStream(file), "utf8")
    try {
      properties.load(reader)
    } finally {
      // The original never closed the stream, leaking a file handle per call.
      reader.close()
    }
    val scalaProp = properties.asScala
    println(scalaProp)
    scalaProp
  }

  /** Runs the statistics job described by the property file.
    *
    * Required properties: `process.name` (must be non-blank), `process.class`
    * (FQCN of a `StatisticsProcess` implementation). Optional: `process.type`
    * — the value "local" flips the `isLocal` flag passed to `init`
    * (presumably selecting a local execution mode; confirm in StatisticsProcess).
    *
    * @param propertyFile path to the configuration file
    * @param inputDate    date as `yyyyMMdd` or `yyyy-MM-dd`
    * @throws Exception if `process.name` or `process.class` is missing/blank
    */
  def process(propertyFile: String, inputDate: String): Unit = {
    val properties = readProperties(propertyFile)

    val name = properties.get("process.name")
    // forall(_.isEmpty) is true when the key is absent OR the value is blank —
    // same two cases the original rejected with nested if/else.
    if (name.forall(_.isEmpty)) {
      throw new Exception("process name is null")
    }

    val className = properties.getOrElse(
      "process.class",
      // Explicit error instead of the original's bare Option.get NoSuchElementException.
      throw new Exception("process.class property is missing")
    )
    // getDeclaredConstructor().newInstance() replaces the deprecated
    // Class.newInstance(), which rethrows checked constructor exceptions unchecked.
    val statisticsProcess = Class
      .forName(className)
      .getDeclaredConstructor()
      .newInstance()
      .asInstanceOf[StatisticsProcess]

    // Normalize yyyyMMdd to yyyy-MM-dd; input already containing '-' (or too
    // short to slice) passes through unchanged. The original's !isEmpty check
    // was redundant with length >= 8.
    val date =
      if (inputDate.length >= 8 && !inputDate.contains("-"))
        inputDate.substring(0, 4) + "-" + inputDate.substring(4, 6) + "-" + inputDate.substring(6, 8)
      else
        inputDate

    // Option.contains replaces the var + conditional assignment.
    val isLocal = properties.get("process.type").contains("local")

    statisticsProcess.init(name.get, isLocal, properties)
    statisticsProcess.process(date)
  }

  /** Entry point: args(0) = properties file path, args(1) = input date. */
  def main(args: Array[String]): Unit = {
    // Fail with a usage message instead of ArrayIndexOutOfBoundsException.
    if (args.length < 2) {
      throw new IllegalArgumentException("usage: RTUStatistics <propertiesFile> <inputDate>")
    }
    val propertiesFile = args(0) // typo 'proppertiesFile' fixed (local name only)
    val inputDate = args(1)
    printf("properties : %s, date : %s \n", propertiesFile, inputDate)
    RTUStatistics.process(propertiesFile, inputDate)
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment