 

Create spark data frame from custom data format

I have a text file that uses the string REC as the record delimiter and a line break as the column delimiter, and every value has its column name attached to it, separated by a comma. Below is the sample data format:

REC
Id,19048
Term,milk
Rank,1
REC
Id,19049
Term,corn
Rank,5

Using REC as the record delimiter, I now want to create a Spark DataFrame with the column names Id, Term and Rank. Please assist me with this.

asked by prakash

1 Answer

Here is working code:

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object RecordSeparator extends App {
  val conf = new SparkConf()
    .setAppName("test")
    .setMaster("local[1]")
    .setExecutorEnv("executor-cores", "2")
  val sc = new SparkContext(conf)

  // Tell the Hadoop text input format to treat "REC" as the record
  // delimiter instead of the default newline.
  val hconf = new Configuration
  hconf.set("textinputformat.record.delimiter", "REC")

  val data = sc.newAPIHadoopFile("data.txt",
      classOf[TextInputFormat], classOf[LongWritable], classOf[Text], hconf)
    .map(x => x._2.toString.trim)       // keep only the record text
    .filter(x => x != "")               // drop the empty split before the first REC
    .map(x => getRecord(x))             // "19048,milk,1"
    .map(x => x.split(","))
    .map(x => record(x(0), x(1), x(2)))

  val sqlContext = new SQLContext(sc)
  import sqlContext.implicits._

  val df = data.toDF()
  df.printSchema()
  df.show(false)

  // Turns one REC block ("Id,19048\nTerm,milk\nRank,1") into "19048,milk,1"
  // by keeping only the values at the odd positions.
  def getRecord(in: String): String = {
    val ar = in.split("\n").mkString(",").split(",")
    val data = Array(ar(1), ar(3), ar(5))
    data.mkString(",")
  }
}

case class record(Id: String, Term: String, Rank: String)

Output:

 root
 |-- Id: string (nullable = true)
 |-- Term: string (nullable = true)
 |-- Rank: string (nullable = true)

+-----+----+----+
|Id   |Term|Rank|
+-----+----+----+
|19048|milk|1   |
|19049|corn|5   |
+-----+----+----+
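
If you are on Spark 2.x or later, the same record-delimiter trick works through SparkSession. Below is a minimal sketch, not part of the original answer: the object name RecordSeparator2x and the key/value-map parsing are my own illustration, and it still assumes the input file is data.txt.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.sql.SparkSession

object RecordSeparator2x extends App {
  val spark = SparkSession.builder()
    .appName("rec-delimiter")
    .master("local[1]")
    .getOrCreate()
  import spark.implicits._

  // Same idea as above: the Hadoop text input format honours a custom record delimiter.
  val hconf = new Configuration(spark.sparkContext.hadoopConfiguration)
  hconf.set("textinputformat.record.delimiter", "REC")

  val df = spark.sparkContext.newAPIHadoopFile("data.txt",
      classOf[TextInputFormat], classOf[LongWritable], classOf[Text], hconf)
    .map(_._2.toString.trim)
    .filter(_.nonEmpty)
    .map { rec =>
      // "Id,19048\nTerm,milk\nRank,1" -> Map(Id -> 19048, Term -> milk, Rank -> 1)
      val kv = rec.split("\n").map(_.split(",", 2)).map(a => a(0) -> a(1)).toMap
      (kv("Id"), kv("Term"), kv("Rank"))
    }
    .toDF("Id", "Term", "Rank")

  df.show(false)
}

Parsing each record into a name/value map instead of relying on fixed positions keeps the code working even if the columns arrive in a different order within a REC block.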
answered by Narendra Parmar