package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo18PI {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("PI")
      .setMaster("local")

    val sc = new SparkContext(conf)

    val list: Range = 0 until 100000000

    // Build a large RDD of sample indices
    val listRDD: RDD[Int] = sc.parallelize(list)

    // Generate random points inside the square [-1, 1] x [-1, 1]
    val pointRDD: RDD[(Double, Double)] = listRDD.map(i => {
      val x: Double = Random.nextDouble() * 2 - 1
      val y: Double = Random.nextDouble() * 2 - 1
      (x, y)
    })

    // Keep only the points that fall inside the unit circle
    val yuanPointRDD: RDD[(Double, Double)] = pointRDD.filter {
      case (x: Double, y: Double) =>
        // squared distance from the origin must be less than the squared radius (1)
        x * x + y * y < 1
    }

    // Estimate PI: (points inside the circle / total points) * 4
    val PI: Double = yuanPointRDD.count().toDouble / list.length * 4.0

    println("PI:" + PI)
  }
}
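The estimate works because the points are drawn uniformly from the 2 x 2 square, so the fraction landing inside the unit circle approaches (circle area) / (square area) = PI / 4; multiplying the observed fraction by 4 recovers PI. The same estimate can also be computed in a single pass without building an intermediate RDD of points. A minimal sketch, assuming the same sc and sample count as above (this variant is not part of the original code):

// One-pass variant: map each sample to 1L if it falls inside the unit circle,
// 0L otherwise, then add the flags up with reduce.
val insideCount: Long = sc.parallelize(0 until 100000000)
  .map { _ =>
    val x = Random.nextDouble() * 2 - 1
    val y = Random.nextDouble() * 2 - 1
    if (x * x + y * y < 1) 1L else 0L
  }
  .reduce(_ + _)

println("PI:" + insideCount.toDouble / 100000000 * 4.0)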