初始化 SparkContext
spark 1.6
1
2
3
4
5
6
// Initialize a SparkContext (Spark 1.x style; in Spark 2+ prefer SparkSession).
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
// Brings in implicit conversions (e.g. rddToPairRDDFunctions) needed for
// pair-RDD operations such as reduceByKey on Spark < 1.3-era code.
import org.apache.spark.SparkContext._

// "local" runs the driver and a single executor thread in-process —
// suitable for development/testing only; use a cluster master URL in production.
val conf = new SparkConf().setMaster("local").setAppName("My App")
val sc = new SparkContext(conf)
|
word count
1
2
// Classic word count: read a text file, split each line on spaces,
// emit (word, 1) pairs, and sum counts per word.
// NOTE: sc.textFile requires a path argument (the original snippet omitted it);
// replace the placeholder with a real local/HDFS path before running.
val text = sc.textFile("hdfs:///path/to/input.txt")
val counts = text.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
|
启动参数
# Submit the Process application to YARN in cluster mode (Spark 1.x syntax;
# Spark 2+ uses --master yarn --deploy-mode cluster).
# Fixed from the original paste: typographic en-dashes (–) replaced with the
# required double hyphens (--), and line-continuation backslashes restored.
# Total executor capacity: 20 executors x 2 cores = 40 cores;
# spark.default.parallelism=500 keeps ample tasks per core for shuffles.
/usr/local/spark/bin/spark-submit \
  --class Process \
  --master yarn-cluster \
  --name Process \
  --queue fetech \
  --num-executors 20 \
  --driver-memory 5g \
  --executor-memory 4g \
  --executor-cores 2 \
  --conf spark.default.parallelism=500 \
  --conf spark.storage.memoryFraction=0.5 \
  # NOTE(review): command continues past this excerpt — the application jar
  # (and its arguments) must follow the trailing backslash above.